diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..efb98a6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,36 @@ +# Python +.venv/ +venv/ +__pycache__/ +*.py[cod] +*$py.class +*.egg-info/ +dist/ +build/ +.eggs/ + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# Testing +.pytest_cache/ +.coverage +htmlcov/ + +# OS +.DS_Store +Thumbs.db + +# Logs +*.log +*.pid + +# Node.js +node_modules/ +npm-debug.log* + +# Temporary files +tmp/ diff --git a/.venv/bin/Activate.ps1 b/.venv/bin/Activate.ps1 deleted file mode 100644 index b49d77b..0000000 --- a/.venv/bin/Activate.ps1 +++ /dev/null @@ -1,247 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. - -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. - -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. 
- -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove VIRTUAL_ENV_PROMPT altogether. - if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { - Remove-Item -Path env:VIRTUAL_ENV_PROMPT - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. - -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. 
- if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. -if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. -if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. 
-$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } - $env:VIRTUAL_ENV_PROMPT = $Prompt -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/.venv/bin/activate b/.venv/bin/activate deleted file mode 100644 index ccc2521..0000000 --- a/.venv/bin/activate +++ /dev/null @@ -1,70 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# You cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # Call hash to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - hash -r 2> /dev/null - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - unset VIRTUAL_ENV_PROMPT - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -# on Windows, a path can contain colons and backslashes and has to be converted: -if [ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ] ; then - # transform D:\path\to\venv to /d/path/to/venv on MSYS - # and to /cygdrive/d/path/to/venv on Cygwin - export VIRTUAL_ENV=$(cygpath "/workspaces/Political_App/.venv") -else - # use the path as-is - export VIRTUAL_ENV="/workspaces/Political_App/.venv" -fi - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - PS1="(.venv) ${PS1:-}" - export PS1 - VIRTUAL_ENV_PROMPT="(.venv) " - export VIRTUAL_ENV_PROMPT -fi - -# Call hash to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -hash -r 2> /dev/null diff --git a/.venv/bin/activate.csh b/.venv/bin/activate.csh deleted file mode 100644 index 4fbc13e..0000000 --- a/.venv/bin/activate.csh +++ /dev/null @@ -1,27 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. - -# Created by Davide Di Blasi . 
-# Ported to Python 3.3 venv by Andrew Svetlov - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/workspaces/Political_App/.venv" - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/bin:$PATH" - - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then - set prompt = "(.venv) $prompt" - setenv VIRTUAL_ENV_PROMPT "(.venv) " -endif - -alias pydoc python -m pydoc - -rehash diff --git a/.venv/bin/activate.fish b/.venv/bin/activate.fish deleted file mode 100644 index 1795a6b..0000000 --- a/.venv/bin/activate.fish +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source /bin/activate.fish" *from fish* -# (https://fishshell.com/). You cannot run it directly. - -function deactivate -d "Exit virtual environment and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - set -e _OLD_FISH_PROMPT_OVERRIDE - # prevents error when using nested fish instances (Issue #93858) - if functions -q _old_fish_prompt - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - end - - set -e VIRTUAL_ENV - set -e VIRTUAL_ENV_PROMPT - if test "$argv[1]" != "nondestructive" - # Self-destruct! - functions -e deactivate - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV "/workspaces/Political_App/.venv" - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# Unset PYTHONHOME if set. -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish uses a function instead of an env var to generate the prompt. - - # Save the current fish_prompt function as the function _old_fish_prompt. - functions -c fish_prompt _old_fish_prompt - - # With the original prompt function renamed, we can override with our own. - function fish_prompt - # Save the return status of the last command. - set -l old_status $status - - # Output the venv prompt; color taken from the blue of the Python logo. - printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal) - - # Restore the return status of the previous command. - echo "exit $old_status" | . - # Output the original/"old" prompt. 
- _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" - set -gx VIRTUAL_ENV_PROMPT "(.venv) " -end diff --git a/.venv/bin/flask b/.venv/bin/flask deleted file mode 100755 index 54c6579..0000000 --- a/.venv/bin/flask +++ /dev/null @@ -1,7 +0,0 @@ -#!/workspaces/Political_App/.venv/bin/python3 -import sys -from flask.cli import main -if __name__ == '__main__': - if sys.argv[0].endswith('.exe'): - sys.argv[0] = sys.argv[0][:-4] - sys.exit(main()) diff --git a/.venv/bin/pip b/.venv/bin/pip deleted file mode 100755 index cd0cc39..0000000 --- a/.venv/bin/pip +++ /dev/null @@ -1,8 +0,0 @@ -#!/workspaces/Political_App/.venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.venv/bin/pip3 b/.venv/bin/pip3 deleted file mode 100755 index cd0cc39..0000000 --- a/.venv/bin/pip3 +++ /dev/null @@ -1,8 +0,0 @@ -#!/workspaces/Political_App/.venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.venv/bin/pip3.12 b/.venv/bin/pip3.12 deleted file mode 100755 index cd0cc39..0000000 --- a/.venv/bin/pip3.12 +++ /dev/null @@ -1,8 +0,0 @@ -#!/workspaces/Political_App/.venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.venv/bin/python b/.venv/bin/python deleted file mode 120000 index 7ae5fe3..0000000 --- a/.venv/bin/python +++ /dev/null @@ -1 +0,0 @@ -/home/codespace/.python/current/bin/python \ No newline at end of file diff --git a/.venv/bin/python3 b/.venv/bin/python3 deleted file mode 120000 index d8654aa..0000000 --- a/.venv/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -python \ No newline at end of file diff --git a/.venv/bin/python3.12 b/.venv/bin/python3.12 deleted file mode 120000 index d8654aa..0000000 --- a/.venv/bin/python3.12 +++ /dev/null @@ -1 +0,0 @@ -python \ No newline at end of file diff --git a/.venv/bin/wheel b/.venv/bin/wheel deleted file mode 100755 index 04411a5..0000000 --- a/.venv/bin/wheel +++ /dev/null @@ -1,8 +0,0 @@ -#!/workspaces/Political_App/.venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from wheel.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.venv/lib/python3.12/site-packages/_distutils_hack/__init__.py b/.venv/lib/python3.12/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index 94f71b9..0000000 --- a/.venv/lib/python3.12/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,239 +0,0 @@ -# don't import any costly modules -import os -import sys - -report_url = ( - "https://github.com/pypa/setuptools/issues/new?template=distutils-deprecation.yml" -) - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - import warnings - - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. 
not an editable install), and/or make sure " - "that setuptools is always imported before distutils." - ) - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - import warnings - - warnings.warn( - "Setuptools is replacing distutils. Support for replacing " - "an already imported distutils is deprecated. In the future, " - "this condition will fail. " - f"Register concerns at {report_url}" - ) - mods = [ - name - for name in sys.modules - if name == "distutils" or name.startswith("distutils.") - ] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') - if which == 'stdlib': - import warnings - - warnings.warn( - "Reliance on distutils from stdlib is deprecated. Users " - "must rely on setuptools to provide the distutils module. " - "Avoid importing distutils or import setuptools first, " - "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib. " - f"Register concerns at {report_url}" - ) - return which == 'local' - - -def ensure_local_distutils(): - import importlib - - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - with shim(): - importlib.import_module('distutils') - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - assert 'setuptools._distutils.log' not in sys.modules - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class _TrivialRe: - def __init__(self, *patterns) -> None: - self._patterns = patterns - - def match(self, string): - return all(pat in string for pat in self._patterns) - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - # optimization: only consider top level modules and those - # found in the CPython test suite. - if path is not None and not fullname.startswith('test.'): - return None - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - if self.is_cpython(): - return None - - import importlib - import importlib.abc - import importlib.util - - try: - mod = importlib.import_module('setuptools._distutils') - except Exception: - # There are a couple of cases where setuptools._distutils - # may not be present: - # - An older Setuptools without a local distutils is - # taking precedence. Ref #2957. - # - Path manipulation during sitecustomize removes - # setuptools from the path but only after the hook - # has been loaded. Ref #2980. - # In either case, fall back to stdlib behavior. - return None - - class DistutilsLoader(importlib.abc.Loader): - def create_module(self, spec): - mod.__name__ = 'distutils' - return mod - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader( - 'distutils', DistutilsLoader(), origin=mod.__file__ - ) - - @staticmethod - def is_cpython(): - """ - Suppress supplying distutils for CPython (build and tests). - Ref #2965 and #3007. - """ - return os.path.isfile('pybuilddir.txt') - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. 
- See pypa/pip#8761 for rationale. - """ - if sys.version_info >= (3, 12) or self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @classmethod - def pip_imported_during_build(cls): - """ - Detect if pip is being imported in a build script. Ref #2355. - """ - import traceback - - return any( - cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) - ) - - @staticmethod - def frame_file_is_setup(frame): - """ - Return True if the indicated frame suggests a setup.py file. - """ - # some frames may not have __file__ (#2940) - return frame.f_globals.get('__file__', '').endswith('setup.py') - - def spec_for_sensitive_tests(self): - """ - Ensure stdlib distutils when running select tests under CPython. - - python/cpython#91169 - """ - clear_distutils() - self.spec_for_distutils = lambda: None - - sensitive_tests = ( - [ - 'test.test_distutils', - 'test.test_peg_generator', - 'test.test_importlib', - ] - if sys.version_info < (3, 10) - else [ - 'test.test_distutils', - ] - ) - - -for name in DistutilsMetaFinder.sensitive_tests: - setattr( - DistutilsMetaFinder, - f'spec_for_{name}', - DistutilsMetaFinder.spec_for_sensitive_tests, - ) - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - DISTUTILS_FINDER in sys.meta_path or insert_shim() - - -class shim: - def __enter__(self) -> None: - insert_shim() - - def __exit__(self, exc: object, value: object, tb: object) -> None: - _remove_shim() - - -def insert_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def _remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass - - -if sys.version_info < (3, 12): - # DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632) - remove_shim = _remove_shim diff --git a/.venv/lib/python3.12/site-packages/_distutils_hack/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/_distutils_hack/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 5c41706..0000000 Binary files a/.venv/lib/python3.12/site-packages/_distutils_hack/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/_distutils_hack/__pycache__/override.cpython-312.pyc b/.venv/lib/python3.12/site-packages/_distutils_hack/__pycache__/override.cpython-312.pyc deleted file mode 100644 index 3dafa21..0000000 Binary files a/.venv/lib/python3.12/site-packages/_distutils_hack/__pycache__/override.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/_distutils_hack/override.py b/.venv/lib/python3.12/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/.venv/lib/python3.12/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/LICENSE.txt b/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/LICENSE.txt deleted file mode 100644 index 79c9825..0000000 --- a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 2010 Jason Kirtland - -Permission is hereby granted, free of 
charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/METADATA deleted file mode 100644 index 6d343f5..0000000 --- a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.3 -Name: blinker -Version: 1.9.0 -Summary: Fast, simple object-to-object and broadcast signaling -Author: Jason Kirtland -Maintainer-email: Pallets Ecosystem -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://blinker.readthedocs.io -Project-URL: Source, https://github.com/pallets-eco/blinker/ - -# Blinker - -Blinker provides a fast dispatching system that allows any number of -interested parties to subscribe to events, or "signals". - - -## Pallets Community Ecosystem - -> [!IMPORTANT]\ -> This project is part of the Pallets Community Ecosystem. Pallets is the open -> source organization that maintains Flask; Pallets-Eco enables community -> maintenance of related projects. If you are interested in helping maintain -> this project, please reach out on [the Pallets Discord server][discord]. -> -> [discord]: https://discord.gg/pallets - - -## Example - -Signal receivers can subscribe to specific senders or receive signals -sent by any sender. - -```pycon ->>> from blinker import signal ->>> started = signal('round-started') ->>> def each(round): -... print(f"Round {round}") -... ->>> started.connect(each) - ->>> def round_two(round): -... print("This is round two.") -... ->>> started.connect(round_two, sender=2) - ->>> for round in range(1, 4): -... started.send(round) -... -Round 1! -Round 2! -This is round two. -Round 3! 
-``` - diff --git a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/RECORD deleted file mode 100644 index 7cfb714..0000000 --- a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054 -blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633 -blinker-1.9.0.dist-info/RECORD,, -blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82 -blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317 -blinker/__pycache__/__init__.cpython-312.pyc,, -blinker/__pycache__/_utilities.cpython-312.pyc,, -blinker/__pycache__/base.cpython-312.pyc,, -blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675 -blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132 -blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/WHEEL deleted file mode 100644 index e3c6fee..0000000 --- a/.venv/lib/python3.12/site-packages/blinker-1.9.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.10.1 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/blinker/__init__.py b/.venv/lib/python3.12/site-packages/blinker/__init__.py deleted file mode 100644 index 1772fa4..0000000 --- a/.venv/lib/python3.12/site-packages/blinker/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from .base import ANY -from .base import default_namespace -from .base import NamedSignal -from .base import Namespace -from .base import Signal -from .base import signal - -__all__ = [ - "ANY", - "default_namespace", - "NamedSignal", - "Namespace", - "Signal", - "signal", -] diff --git a/.venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index da55ec8..0000000 Binary files a/.venv/lib/python3.12/site-packages/blinker/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc b/.venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc deleted file mode 100644 index 7d5ad63..0000000 Binary files a/.venv/lib/python3.12/site-packages/blinker/__pycache__/_utilities.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc b/.venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 7bb7882..0000000 Binary files a/.venv/lib/python3.12/site-packages/blinker/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/blinker/_utilities.py b/.venv/lib/python3.12/site-packages/blinker/_utilities.py deleted file mode 100644 index 000c902..0000000 --- a/.venv/lib/python3.12/site-packages/blinker/_utilities.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import inspect -import typing as t -from weakref import ref -from weakref import WeakMethod - -T = 
t.TypeVar("T") - - -class Symbol: - """A constant symbol, nicer than ``object()``. Repeated calls return the - same instance. - - >>> Symbol('foo') is Symbol('foo') - True - >>> Symbol('foo') - foo - """ - - symbols: t.ClassVar[dict[str, Symbol]] = {} - - def __new__(cls, name: str) -> Symbol: - if name in cls.symbols: - return cls.symbols[name] - - obj = super().__new__(cls) - cls.symbols[name] = obj - return obj - - def __init__(self, name: str) -> None: - self.name = name - - def __repr__(self) -> str: - return self.name - - def __getnewargs__(self) -> tuple[t.Any, ...]: - return (self.name,) - - -def make_id(obj: object) -> c.Hashable: - """Get a stable identifier for a receiver or sender, to be used as a dict - key or in a set. - """ - if inspect.ismethod(obj): - # The id of a bound method is not stable, but the id of the unbound - # function and instance are. - return id(obj.__func__), id(obj.__self__) - - if isinstance(obj, (str, int)): - # Instances with the same value always compare equal and have the same - # hash, even if the id may change. - return obj - - # Assume other types are not hashable but will always be the same instance. - return id(obj) - - -def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]: - if inspect.ismethod(obj): - return WeakMethod(obj, callback) # type: ignore[arg-type, return-value] - - return ref(obj, callback) diff --git a/.venv/lib/python3.12/site-packages/blinker/base.py b/.venv/lib/python3.12/site-packages/blinker/base.py deleted file mode 100644 index d051b94..0000000 --- a/.venv/lib/python3.12/site-packages/blinker/base.py +++ /dev/null @@ -1,512 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import sys -import typing as t -import weakref -from collections import defaultdict -from contextlib import contextmanager -from functools import cached_property -from inspect import iscoroutinefunction - -from ._utilities import make_id -from ._utilities import make_ref -from ._utilities import Symbol - -F = t.TypeVar("F", bound=c.Callable[..., t.Any]) - -ANY = Symbol("ANY") -"""Symbol for "any sender".""" - -ANY_ID = 0 - - -class Signal: - """A notification emitter. - - :param doc: The docstring for the signal. - """ - - ANY = ANY - """An alias for the :data:`~blinker.ANY` sender symbol.""" - - set_class: type[set[t.Any]] = set - """The set class to use for tracking connected receivers and senders. - Python's ``set`` is unordered. If receivers must be dispatched in the order - they were connected, an ordered set implementation can be used. - - .. versionadded:: 1.7 - """ - - @cached_property - def receiver_connected(self) -> Signal: - """Emitted at the end of each :meth:`connect` call. - - The signal sender is the signal instance, and the :meth:`connect` - arguments are passed through: ``receiver``, ``sender``, and ``weak``. - - .. versionadded:: 1.2 - """ - return Signal(doc="Emitted after a receiver connects.") - - @cached_property - def receiver_disconnected(self) -> Signal: - """Emitted at the end of each :meth:`disconnect` call. - - The sender is the signal instance, and the :meth:`disconnect` arguments - are passed through: ``receiver`` and ``sender``. - - This signal is emitted **only** when :meth:`disconnect` is called - explicitly. This signal cannot be emitted by an automatic disconnect - when a weakly referenced receiver or sender goes out of scope, as the - instance is no longer be available to be used as the sender for this - signal. 
- - An alternative approach is available by subscribing to - :attr:`receiver_connected` and setting up a custom weakref cleanup - callback on weak receivers and senders. - - .. versionadded:: 1.2 - """ - return Signal(doc="Emitted after a receiver disconnects.") - - def __init__(self, doc: str | None = None) -> None: - if doc: - self.__doc__ = doc - - self.receivers: dict[ - t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any] - ] = {} - """The map of connected receivers. Useful to quickly check if any - receivers are connected to the signal: ``if s.receivers:``. The - structure and data is not part of the public API, but checking its - boolean value is. - """ - - self.is_muted: bool = False - self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class) - self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class) - self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {} - - def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F: - """Connect ``receiver`` to be called when the signal is sent by - ``sender``. - - :param receiver: The callable to call when :meth:`send` is called with - the given ``sender``, passing ``sender`` as a positional argument - along with any extra keyword arguments. - :param sender: Any object or :data:`ANY`. ``receiver`` will only be - called when :meth:`send` is called with this sender. If ``ANY``, the - receiver will be called for any sender. A receiver may be connected - to multiple senders by calling :meth:`connect` multiple times. - :param weak: Track the receiver with a :mod:`weakref`. The receiver will - be automatically disconnected when it is garbage collected. When - connecting a receiver defined within a function, set to ``False``, - otherwise it will be disconnected when the function scope ends. - """ - receiver_id = make_id(receiver) - sender_id = ANY_ID if sender is ANY else make_id(sender) - - if weak: - self.receivers[receiver_id] = make_ref( - receiver, self._make_cleanup_receiver(receiver_id) - ) - else: - self.receivers[receiver_id] = receiver - - self._by_sender[sender_id].add(receiver_id) - self._by_receiver[receiver_id].add(sender_id) - - if sender is not ANY and sender_id not in self._weak_senders: - # store a cleanup for weakref-able senders - try: - self._weak_senders[sender_id] = make_ref( - sender, self._make_cleanup_sender(sender_id) - ) - except TypeError: - pass - - if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers: - try: - self.receiver_connected.send( - self, receiver=receiver, sender=sender, weak=weak - ) - except TypeError: - # TODO no explanation or test for this - self.disconnect(receiver, sender) - raise - - return receiver - - def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]: - """Connect the decorated function to be called when the signal is sent - by ``sender``. - - The decorated function will be called when :meth:`send` is called with - the given ``sender``, passing ``sender`` as a positional argument along - with any extra keyword arguments. - - :param sender: Any object or :data:`ANY`. ``receiver`` will only be - called when :meth:`send` is called with this sender. If ``ANY``, the - receiver will be called for any sender. A receiver may be connected - to multiple senders by calling :meth:`connect` multiple times. - :param weak: Track the receiver with a :mod:`weakref`. The receiver will - be automatically disconnected when it is garbage collected. 
When - connecting a receiver defined within a function, set to ``False``, - otherwise it will be disconnected when the function scope ends.= - - .. versionadded:: 1.1 - """ - - def decorator(fn: F) -> F: - self.connect(fn, sender, weak) - return fn - - return decorator - - @contextmanager - def connected_to( - self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY - ) -> c.Generator[None, None, None]: - """A context manager that temporarily connects ``receiver`` to the - signal while a ``with`` block executes. When the block exits, the - receiver is disconnected. Useful for tests. - - :param receiver: The callable to call when :meth:`send` is called with - the given ``sender``, passing ``sender`` as a positional argument - along with any extra keyword arguments. - :param sender: Any object or :data:`ANY`. ``receiver`` will only be - called when :meth:`send` is called with this sender. If ``ANY``, the - receiver will be called for any sender. - - .. versionadded:: 1.1 - """ - self.connect(receiver, sender=sender, weak=False) - - try: - yield None - finally: - self.disconnect(receiver) - - @contextmanager - def muted(self) -> c.Generator[None, None, None]: - """A context manager that temporarily disables the signal. No receivers - will be called if the signal is sent, until the ``with`` block exits. - Useful for tests. - """ - self.is_muted = True - - try: - yield None - finally: - self.is_muted = False - - def send( - self, - sender: t.Any | None = None, - /, - *, - _async_wrapper: c.Callable[ - [c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any] - ] - | None = None, - **kwargs: t.Any, - ) -> list[tuple[c.Callable[..., t.Any], t.Any]]: - """Call all receivers that are connected to the given ``sender`` - or :data:`ANY`. Each receiver is called with ``sender`` as a positional - argument along with any extra keyword arguments. Return a list of - ``(receiver, return value)`` tuples. - - The order receivers are called is undefined, but can be influenced by - setting :attr:`set_class`. - - If a receiver raises an exception, that exception will propagate up. - This makes debugging straightforward, with an assumption that correctly - implemented receivers will not raise. - - :param sender: Call receivers connected to this sender, in addition to - those connected to :data:`ANY`. - :param _async_wrapper: Will be called on any receivers that are async - coroutines to turn them into sync callables. For example, could run - the receiver with an event loop. - :param kwargs: Extra keyword arguments to pass to each receiver. - - .. versionchanged:: 1.7 - Added the ``_async_wrapper`` argument. - """ - if self.is_muted: - return [] - - results = [] - - for receiver in self.receivers_for(sender): - if iscoroutinefunction(receiver): - if _async_wrapper is None: - raise RuntimeError("Cannot send to a coroutine function.") - - result = _async_wrapper(receiver)(sender, **kwargs) - else: - result = receiver(sender, **kwargs) - - results.append((receiver, result)) - - return results - - async def send_async( - self, - sender: t.Any | None = None, - /, - *, - _sync_wrapper: c.Callable[ - [c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]] - ] - | None = None, - **kwargs: t.Any, - ) -> list[tuple[c.Callable[..., t.Any], t.Any]]: - """Await all receivers that are connected to the given ``sender`` - or :data:`ANY`. Each receiver is called with ``sender`` as a positional - argument along with any extra keyword arguments. 
Return a list of - ``(receiver, return value)`` tuples. - - The order receivers are called is undefined, but can be influenced by - setting :attr:`set_class`. - - If a receiver raises an exception, that exception will propagate up. - This makes debugging straightforward, with an assumption that correctly - implemented receivers will not raise. - - :param sender: Call receivers connected to this sender, in addition to - those connected to :data:`ANY`. - :param _sync_wrapper: Will be called on any receivers that are sync - callables to turn them into async coroutines. For example, - could call the receiver in a thread. - :param kwargs: Extra keyword arguments to pass to each receiver. - - .. versionadded:: 1.7 - """ - if self.is_muted: - return [] - - results = [] - - for receiver in self.receivers_for(sender): - if not iscoroutinefunction(receiver): - if _sync_wrapper is None: - raise RuntimeError("Cannot send to a non-coroutine function.") - - result = await _sync_wrapper(receiver)(sender, **kwargs) - else: - result = await receiver(sender, **kwargs) - - results.append((receiver, result)) - - return results - - def has_receivers_for(self, sender: t.Any) -> bool: - """Check if there is at least one receiver that will be called with the - given ``sender``. A receiver connected to :data:`ANY` will always be - called, regardless of sender. Does not check if weakly referenced - receivers are still live. See :meth:`receivers_for` for a stronger - search. - - :param sender: Check for receivers connected to this sender, in addition - to those connected to :data:`ANY`. - """ - if not self.receivers: - return False - - if self._by_sender[ANY_ID]: - return True - - if sender is ANY: - return False - - return make_id(sender) in self._by_sender - - def receivers_for( - self, sender: t.Any - ) -> c.Generator[c.Callable[..., t.Any], None, None]: - """Yield each receiver to be called for ``sender``, in addition to those - to be called for :data:`ANY`. Weakly referenced receivers that are not - live will be disconnected and skipped. - - :param sender: Yield receivers connected to this sender, in addition - to those connected to :data:`ANY`. - """ - # TODO: test receivers_for(ANY) - if not self.receivers: - return - - sender_id = make_id(sender) - - if sender_id in self._by_sender: - ids = self._by_sender[ANY_ID] | self._by_sender[sender_id] - else: - ids = self._by_sender[ANY_ID].copy() - - for receiver_id in ids: - receiver = self.receivers.get(receiver_id) - - if receiver is None: - continue - - if isinstance(receiver, weakref.ref): - strong = receiver() - - if strong is None: - self._disconnect(receiver_id, ANY_ID) - continue - - yield strong - else: - yield receiver - - def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None: - """Disconnect ``receiver`` from being called when the signal is sent by - ``sender``. - - :param receiver: A connected receiver callable. - :param sender: Disconnect from only this sender. By default, disconnect - from all senders. 
- """ - sender_id: c.Hashable - - if sender is ANY: - sender_id = ANY_ID - else: - sender_id = make_id(sender) - - receiver_id = make_id(receiver) - self._disconnect(receiver_id, sender_id) - - if ( - "receiver_disconnected" in self.__dict__ - and self.receiver_disconnected.receivers - ): - self.receiver_disconnected.send(self, receiver=receiver, sender=sender) - - def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None: - if sender_id == ANY_ID: - if self._by_receiver.pop(receiver_id, None) is not None: - for bucket in self._by_sender.values(): - bucket.discard(receiver_id) - - self.receivers.pop(receiver_id, None) - else: - self._by_sender[sender_id].discard(receiver_id) - self._by_receiver[receiver_id].discard(sender_id) - - def _make_cleanup_receiver( - self, receiver_id: c.Hashable - ) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]: - """Create a callback function to disconnect a weakly referenced - receiver when it is garbage collected. - """ - - def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None: - # If the interpreter is shutting down, disconnecting can result in a - # weird ignored exception. Don't call it in that case. - if not sys.is_finalizing(): - self._disconnect(receiver_id, ANY_ID) - - return cleanup - - def _make_cleanup_sender( - self, sender_id: c.Hashable - ) -> c.Callable[[weakref.ref[t.Any]], None]: - """Create a callback function to disconnect all receivers for a weakly - referenced sender when it is garbage collected. - """ - assert sender_id != ANY_ID - - def cleanup(ref: weakref.ref[t.Any]) -> None: - self._weak_senders.pop(sender_id, None) - - for receiver_id in self._by_sender.pop(sender_id, ()): - self._by_receiver[receiver_id].discard(sender_id) - - return cleanup - - def _cleanup_bookkeeping(self) -> None: - """Prune unused sender/receiver bookkeeping. Not threadsafe. - - Connecting & disconnecting leaves behind a small amount of bookkeeping - data. Typical workloads using Blinker, for example in most web apps, - Flask, CLI scripts, etc., are not adversely affected by this - bookkeeping. - - With a long-running process performing dynamic signal routing with high - volume, e.g. connecting to function closures, senders are all unique - object instances. Doing all of this over and over may cause memory usage - to grow due to extraneous bookkeeping. (An empty ``set`` for each stale - sender/receiver pair.) - - This method will prune that bookkeeping away, with the caveat that such - pruning is not threadsafe. The risk is that cleanup of a fully - disconnected receiver/sender pair occurs while another thread is - connecting that same pair. If you are in the highly dynamic, unique - receiver/sender situation that has lead you to this method, that failure - mode is perhaps not a big deal for you. - """ - for mapping in (self._by_sender, self._by_receiver): - for ident, bucket in list(mapping.items()): - if not bucket: - mapping.pop(ident, None) - - def _clear_state(self) -> None: - """Disconnect all receivers and senders. Useful for tests.""" - self._weak_senders.clear() - self.receivers.clear() - self._by_sender.clear() - self._by_receiver.clear() - - -class NamedSignal(Signal): - """A named generic notification emitter. The name is not used by the signal - itself, but matches the key in the :class:`Namespace` that it belongs to. - - :param name: The name of the signal within the namespace. - :param doc: The docstring for the signal. 
- """ - - def __init__(self, name: str, doc: str | None = None) -> None: - super().__init__(doc) - - #: The name of this signal. - self.name: str = name - - def __repr__(self) -> str: - base = super().__repr__() - return f"{base[:-1]}; {self.name!r}>" # noqa: E702 - - -class Namespace(dict[str, NamedSignal]): - """A dict mapping names to signals.""" - - def signal(self, name: str, doc: str | None = None) -> NamedSignal: - """Return the :class:`NamedSignal` for the given ``name``, creating it - if required. Repeated calls with the same name return the same signal. - - :param name: The name of the signal. - :param doc: The docstring of the signal. - """ - if name not in self: - self[name] = NamedSignal(name, doc) - - return self[name] - - -class _PNamespaceSignal(t.Protocol): - def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ... - - -default_namespace: Namespace = Namespace() -"""A default :class:`Namespace` for creating named signals. :func:`signal` -creates a :class:`NamedSignal` in this namespace. -""" - -signal: _PNamespaceSignal = default_namespace.signal -"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given -``name``, creating it if required. Repeated calls with the same name return the -same signal. -""" diff --git a/.venv/lib/python3.12/site-packages/blinker/py.typed b/.venv/lib/python3.12/site-packages/blinker/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA deleted file mode 100644 index 3f433af..0000000 --- a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA +++ /dev/null @@ -1,84 +0,0 @@ -Metadata-Version: 2.4 -Name: click -Version: 8.3.1 -Summary: Composable command line interface toolkit -Maintainer-email: Pallets -Requires-Python: >=3.10 -Description-Content-Type: text/markdown -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: colorama; platform_system == 'Windows' -Project-URL: Changes, https://click.palletsprojects.com/page/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/click/ - -
- -# Click - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -## A Simple Example - -```python -import click - -@click.command() -@click.option("--count", default=1, help="Number of greetings.") -@click.option("--name", prompt="Your name", help="The person to greet.") -def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - -if __name__ == '__main__': - hello() -``` - -``` -$ python hello.py --count=3 -Your name: Click -Hello, Click! -Hello, Click! -Hello, Click! -``` - - -## Donate - -The Pallets organization develops and supports Click and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD deleted file mode 100644 index 77e5c98..0000000 --- a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD +++ /dev/null @@ -1,40 +0,0 @@ -click-8.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.3.1.dist-info/METADATA,sha256=XZeBrMAE0ghTE88SjfrSDuSyNCpBPplxJR1tbwD9oZg,2621 -click-8.3.1.dist-info/RECORD,, -click-8.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -click-8.3.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473 -click/__pycache__/__init__.cpython-312.pyc,, -click/__pycache__/_compat.cpython-312.pyc,, -click/__pycache__/_termui_impl.cpython-312.pyc,, -click/__pycache__/_textwrap.cpython-312.pyc,, -click/__pycache__/_utils.cpython-312.pyc,, -click/__pycache__/_winconsole.cpython-312.pyc,, -click/__pycache__/core.cpython-312.pyc,, -click/__pycache__/decorators.cpython-312.pyc,, -click/__pycache__/exceptions.cpython-312.pyc,, -click/__pycache__/formatting.cpython-312.pyc,, -click/__pycache__/globals.cpython-312.pyc,, -click/__pycache__/parser.cpython-312.pyc,, -click/__pycache__/shell_completion.cpython-312.pyc,, -click/__pycache__/termui.cpython-312.pyc,, -click/__pycache__/testing.cpython-312.pyc,, -click/__pycache__/types.cpython-312.pyc,, -click/__pycache__/utils.cpython-312.pyc,, -click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693 -click/_termui_impl.py,sha256=rgCb3On8X5A4200rA5L6i13u5iapmFer7sru57Jy6zA,27093 -click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400 -click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943 
-click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465 -click/core.py,sha256=U6Bfxt8GkjNDqyJ0HqXvluJHtyZ4sY5USAvM1Cdq7mQ,132105 -click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461 -click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954 -click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730 -click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923 -click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994 -click/termui.py,sha256=hqCEjNndU-nzW08nRAkBaVgfZp_FdCA9KxfIWlKYaMc,31037 -click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102 -click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927 -click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257 diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL deleted file mode 100644 index d8b9936..0000000 --- a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt deleted file mode 100644 index d12a849..0000000 --- a/.venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/click/__init__.py b/.venv/lib/python3.12/site-packages/click/__init__.py deleted file mode 100644 index 1aa547c..0000000 --- a/.venv/lib/python3.12/site-packages/click/__init__.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. 
Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. -""" - -from __future__ import annotations - -from .core import Argument as Argument -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui import style as style -from .termui import unstyle as unstyle -from .types import BOOL as BOOL -from .types import Choice as Choice -from .types import DateTime as DateTime -from .types import File as File -from .types import FLOAT as FLOAT -from .types import FloatRange as FloatRange -from .types import INT as INT -from .types import IntRange as IntRange -from .types import ParamType as ParamType -from .types import Path as Path -from .types import STRING as STRING -from .types import Tuple as Tuple -from .types import UNPROCESSED as UNPROCESSED -from .types import UUID as UUID -from .utils import echo as echo -from .utils import format_filename as format_filename -from .utils import get_app_dir as get_app_dir -from .utils import get_binary_stream as get_binary_stream -from .utils import get_text_stream as get_text_stream -from .utils import open_file as open_file - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - from .core import _BaseCommand - - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - from .core import _MultiCommand - - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. 
Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - if name == "OptionParser": - from .parser import _OptionParser - - warnings.warn( - "'OptionParser' is deprecated and will be removed in Click 9.0. The" - " old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return _OptionParser - - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Click 9.1. Use feature detection or" - " 'importlib.metadata.version(\"click\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("click") - - raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 05c9c16..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc deleted file mode 100644 index b70a5b8..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc deleted file mode 100644 index e013f41..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc deleted file mode 100644 index aee1dc4..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc deleted file mode 100644 index bb0aac0..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc deleted file mode 100644 index 704e0d8..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc deleted file mode 100644 index cd5e703..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc deleted file mode 100644 index dc85e5f..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 54e8528..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc deleted file mode 100644 index ff07bf7..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc deleted file mode 100644 index 1d0e55b..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc deleted file mode 100644 index 4b82047..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc deleted file mode 100644 index 212033d..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc deleted file mode 100644 index e4bd480..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc deleted file mode 100644 index b63faff..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc deleted file mode 100644 index 345313e..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 7fab977..0000000 Binary files a/.venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/click/_compat.py b/.venv/lib/python3.12/site-packages/click/_compat.py deleted file mode 100644 index f2726b9..0000000 --- a/.venv/lib/python3.12/site-packages/click/_compat.py +++ /dev/null @@ -1,622 +0,0 @@ -from __future__ import annotations - -import codecs -import collections.abc as cabc -import io -import os -import re -import sys -import typing as t -from types import TracebackType -from weakref import WeakKeyDictionary - -CYGWIN = sys.platform.startswith("cygwin") -WIN = sys.platform.startswith("win") -auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None -_ansi_re = 
re.compile(r"\033\[[;?0-9]*[a-zA-Z]") - - -def _make_text_stream( - stream: t.BinaryIO, - encoding: str | None, - errors: str | None, - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = "replace" - return _NonClosingTextIOWrapper( - stream, - encoding, - errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def is_ascii_encoding(encoding: str) -> bool: - """Checks if a given encoding is ascii.""" - try: - return codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -def get_best_encoding(stream: t.IO[t.Any]) -> str: - """Returns the default stream encoding if not found.""" - rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return "utf-8" - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__( - self, - stream: t.BinaryIO, - encoding: str | None, - errors: str | None, - force_readable: bool = False, - force_writable: bool = False, - **extra: t.Any, - ) -> None: - self._stream = stream = t.cast( - t.BinaryIO, _FixupStream(stream, force_readable, force_writable) - ) - super().__init__(stream, encoding, errors, **extra) - - def __del__(self) -> None: - try: - self.detach() - except Exception: - pass - - def isatty(self) -> bool: - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream: - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). - """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write(b"") - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. 
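The deleted `_is_binary_reader` helper above probes a stream by reading zero bytes and checking the type of the result; the `_is_binary_writer` that follows applies the same trick to `write`. As a minimal standalone sketch of that probing idea only (the `probe_is_binary` name is ours for illustration, not part of Click):

```python
import io

def probe_is_binary(stream) -> bool:
    # Reading zero bytes has no side effects and reveals whether the
    # stream produces bytes (binary) or str (text); anything that
    # raises (e.g. an already-closed stream) falls back to "not binary".
    try:
        return isinstance(stream.read(0), bytes)
    except Exception:
        return False

# BytesIO is a binary stream, StringIO is a text stream.
print(probe_is_binary(io.BytesIO(b"data")))   # True
print(probe_is_binary(io.StringIO("data")))   # False
```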
- - -def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. - return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: str | None, errors: str | None -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO[t.Any], - encoding: str | None, - errors: str | None, - is_binary: t.Callable[[t.IO[t.Any], bool], bool], - find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. 
- possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. - return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO[t.Any], - encoding: str | None, - errors: str | None, - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO[t.Any], - encoding: str | None, - errors: str | None, - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: str | os.PathLike[str] | int, - mode: str, - encoding: str | None, - errors: str | None, -) -> t.IO[t.Any]: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - atomic: bool = False, -) -> tuple[t.IO[t.Any], bool]: - binary = "b" in mode - filename = os.fspath(filename) - - # Standard streams first. These are simple because they ignore the - # atomic flag. Use fsdecode to handle Path("-"). 
- if os.fsdecode(filename) == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. - if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: int | None = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO[t.Any], af), True - - -class _AtomicFile: - def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> _AtomicFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.IO[t.Any] | None = None, color: bool | None = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s: str) -> int: - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write # type: ignore[method-assign] - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None - ) -> t.TextIO | None: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO[t.Any]) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.TextIO | None], - wrapper_func: t.Callable[[], t.TextIO], -) -> t.Callable[[], t.TextIO | None]: - cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.TextIO | None: - stream = src_func() - - if stream is None: - return None - - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/.venv/lib/python3.12/site-packages/click/_termui_impl.py b/.venv/lib/python3.12/site-packages/click/_termui_impl.py deleted file mode 100644 index ee8225c..0000000 --- a/.venv/lib/python3.12/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,852 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
-""" - -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import math -import os -import shlex -import sys -import time -import typing as t -from gettext import gettext as _ -from io import StringIO -from pathlib import Path -from types import TracebackType - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: cabc.Iterable[V] | None, - length: int | None = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - label: str | None = None, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.hidden = hidden - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label: str = label or "" - - if file is None: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - file = StringIO() - - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width: int = width - self.autowidth: bool = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast("cabc.Iterable[V]", range(length)) - self.iter: cabc.Iterable[V] = iter(iterable) - self.length = length - self.pos: int = 0 - self.avg: list[float] = [] - self.last_eta: float - self.start: float - self.start = self.last_eta = time.time() - self.eta_known: bool = False - self.finished: bool = False - self.max_width: int | None = None - self.entered: bool = False - self.current_item: V | None = None - self._is_atty = isatty(self.file) - self._last_line: str | None = None - - def __enter__(self) -> ProgressBar[V]: - self.entered = True - self.render_progress() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.render_finish() - - def __iter__(self) -> cabc.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.hidden or not self._is_atty: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - if self.hidden: - return - - if not self._is_atty: - # Only output the label once if the output is not a TTY. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - import shutil - - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width and self.max_width is not None: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: V | None = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> cabc.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if not self._is_atty: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. - if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if stdout is None: - stdout = StringIO() - - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - - # Split and normalize the pager command into parts. 
- pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False) - if pager_cmd_parts: - if WIN: - if _tempfilepager(generator, pager_cmd_parts, color): - return - elif _pipepager(generator, pager_cmd_parts, color): - return - - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if (WIN or sys.platform.startswith("os2")) and _tempfilepager( - generator, ["more"], color - ): - return - if _pipepager(generator, ["less"], color): - return - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if _pipepager(generator, ["more"], color): - return - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - - import shutil - - cmd = cmd_parts[0] - cmd_params = cmd_parts[1:] - - cmd_filepath = shutil.which(cmd) - if not cmd_filepath: - return False - - # Produces a normalized absolute path string. - # multi-call binaries such as busybox derive their identity from the symlink - # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) - cmd_path = Path(cmd_filepath).absolute() - cmd_name = cmd_path.name - - import subprocess - - # Make a local copy of the environment to not affect the global one. - env = dict(os.environ) - - # If we're piping to less and the user hasn't decided on colors, we enable - # them by default we find the -R flag in the command line arguments. - if color is None and cmd_name == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen( - [str(cmd_path)] + cmd_params, - shell=False, - stdin=subprocess.PIPE, - env=env, - errors="replace", - text=True, - ) - assert c.stdin is not None - try: - for text in generator: - if not color: - text = strip_ansi(text) - - c.stdin.write(text) - except BrokenPipeError: - # In case the pager exited unexpectedly, ignore the broken pipe error. - pass - except Exception as e: - # In case there is an exception we want to close the pager immediately - # and let the caller handle it. - # Otherwise the pager will keep running, and the user may not notice - # the error message, or worse yet it may leave the terminal in a broken state. - c.terminate() - raise e - finally: - # We must close stdin and wait for the pager to exit before we continue - try: - c.stdin.close() - # Close implies flush, so it might throw a BrokenPipeError if the pager - # process exited already. - except BrokenPipeError: - pass - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. 
- while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - return True - - -def _tempfilepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by invoking a program on a temporary file. - - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - - import shutil - - cmd = cmd_parts[0] - - cmd_filepath = shutil.which(cmd) - if not cmd_filepath: - return False - # Produces a normalized absolute path string. - # multi-call binaries such as busybox derive their identity from the symlink - # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) - cmd_path = Path(cmd_filepath).absolute() - - import subprocess - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - subprocess.call([str(cmd_path), filename]) - except OSError: - # Command not found - pass - finally: - os.close(fd) - os.unlink(filename) - - return True - - -def _nullpager( - stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None -) -> None: - """Simply print unformatted text. This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - - from shutil import which - - for editor in "sensible-editor", "vim", "nano": - if which(editor) is not None: - return editor - return "vi" - - def edit_files(self, filenames: cabc.Iterable[str]) -> None: - import subprocess - - editor = self.get_editor() - environ: dict[str, str] | None = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - exc_filename = " ".join(f'"{filename}"' for filename in filenames) - - try: - c = subprocess.Popen( - args=f"{editor} {exc_filename}", env=environ, shell=True - ) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - @t.overload - def edit(self, text: bytes | bytearray) -> bytes | None: ... - - # We cannot know whether or not the type expected is str or bytes when None - # is passed, so str is returned as that was what was done before. - @t.overload - def edit(self, text: str | None) -> str | None: ... 
- - def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None: - import tempfile - - if text is None: - data: bytes | bytearray = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. - os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. - timestamp = os.path.getmtime(name) - - self.edit_files((name,)) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url) - args = ["explorer", f"/select,{url}"] - else: - args = ["start"] - if wait: - args.append("/WAIT") - args.append("") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - elif CYGWIN: - if locate: - url = _unquote_file(url) - args = ["cygstart", os.path.dirname(url)] - else: - args = ["cygstart"] - if wait: - args.append("-w") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> None: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if sys.platform == "win32": - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. 
- # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - - if echo: - func = t.cast(t.Callable[[], str], msvcrt.getwche) - else: - func = t.cast(t.Callable[[], str], msvcrt.getwch) - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import termios - import tty - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - f: t.TextIO | None - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/.venv/lib/python3.12/site-packages/click/_textwrap.py b/.venv/lib/python3.12/site-packages/click/_textwrap.py deleted file mode 100644 index 97fbee3..0000000 --- a/.venv/lib/python3.12/site-packages/click/_textwrap.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import textwrap -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: list[str], - cur_line: list[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> cabc.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, 
line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/.venv/lib/python3.12/site-packages/click/_utils.py b/.venv/lib/python3.12/site-packages/click/_utils.py deleted file mode 100644 index 09fb008..0000000 --- a/.venv/lib/python3.12/site-packages/click/_utils.py +++ /dev/null @@ -1,36 +0,0 @@ -from __future__ import annotations - -import enum -import typing as t - - -class Sentinel(enum.Enum): - """Enum used to define sentinel values. - - .. seealso:: - - `PEP 661 - Sentinel Values `_. - """ - - UNSET = object() - FLAG_NEEDS_VALUE = object() - - def __repr__(self) -> str: - return f"{self.__class__.__name__}.{self.name}" - - -UNSET = Sentinel.UNSET -"""Sentinel used to indicate that a value is not set.""" - -FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE -"""Sentinel used to indicate an option was passed as a flag without a -value but is not a flag option. - -``Option.consume_value`` uses this to prompt or use the ``flag_value``. -""" - -T_UNSET = t.Literal[UNSET] # type: ignore[valid-type] -"""Type hint for the :data:`UNSET` sentinel value.""" - -T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type] -"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value.""" diff --git a/.venv/lib/python3.12/site-packages/click/_winconsole.py b/.venv/lib/python3.12/site-packages/click/_winconsole.py deleted file mode 100644 index e56c7c6..0000000 --- a/.venv/lib/python3.12/site-packages/click/_winconsole.py +++ /dev/null @@ -1,296 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. 
-from __future__ import annotations - -import collections.abc as cabc -import io -import sys -import time -import typing as t -from ctypes import Array -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -if t.TYPE_CHECKING: - try: - # Using `typing_extensions.Buffer` instead of `collections.abc` - # on Windows for some reason does not have `Sized` implemented. - from collections.abc import Buffer # type: ignore - except ImportError: - from typing_extensions import Buffer - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. 
- get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ # noqa: RUF012 - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]: - buf = Py_buffer() - flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - out: Array[c_char] = buffer_type.from_address(buf.buf) - return out - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle: int | None) -> None: - self.handle = handle - - def isatty(self) -> t.Literal[True]: - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self) -> t.Literal[True]: - return True - - def readinto(self, b: Buffer) -> int: - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self) -> t.Literal[True]: - return True - - @staticmethod - def _get_error_message(errno: int) -> str: - if errno == ERROR_SUCCESS: - return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b: Buffer) -> int: - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self) -> str: - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - 
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None -) -> t.TextIO | None: - if ( - get_buffer is None - or encoding not in {"utf-16-le", None} - or errors not in {"strict", None} - or not _is_console(f) - ): - return None - - func = _stream_factories.get(f.fileno()) - if func is None: - return None - - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/.venv/lib/python3.12/site-packages/click/core.py b/.venv/lib/python3.12/site-packages/click/core.py deleted file mode 100644 index 57f549c..0000000 --- a/.venv/lib/python3.12/site-packages/click/core.py +++ /dev/null @@ -1,3415 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from collections import Counter -from contextlib import AbstractContextManager -from contextlib import contextmanager -from contextlib import ExitStack -from functools import update_wrapper -from gettext import gettext as _ -from gettext import ngettext -from itertools import repeat -from types import TracebackType - -from . 
import types -from ._utils import FLAG_NEEDS_VALUE -from ._utils import UNSET -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import NoArgsIsHelpError -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _OptionParser -from .parser import _split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound="t.Callable[..., t.Any]") -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: Context, incomplete: str -) -> cabc.Iterator[tuple[str, Command]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. - - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(Group, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_nested_chain( - base_command: Group, cmd_name: str, cmd: Command, register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, Group): - return - - if register: - message = ( - f"It is not possible to add the group {cmd_name!r} to another" - f" group {base_command.name!r} that is in chain mode." - ) - else: - message = ( - f"Found the group {cmd_name!r} as subcommand to another group " - f" {base_command.name!r} that is in chain mode. This is not supported." - ) - - raise RuntimeError(message) - - -def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size), strict=False)) - - -@contextmanager -def augment_usage_errors( - ctx: Context, param: Parameter | None = None -) -> cabc.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: cabc.Sequence[Parameter], - declaration_order: cabc.Sequence[Parameter], -) -> list[Parameter]: - """Returns all declared parameters in the order they should be processed. - - The declared parameters are re-shuffled depending on the order in which - they were invoked, as well as the eagerness of each parameters. - - The invocation order takes precedence over the declaration order. I.e. the - order in which the user provided them to the CLI is respected. 
- - This behavior and its effect on callback evaluation is detailed at: - https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order - """ - - def sort_key(item: Parameter) -> tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. - - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. 
- :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. - :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.2 - The ``protected_args`` attribute is deprecated and will be removed in - Click 9.0. ``args`` will contain remaining unparsed tokens. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: type[HelpFormatter] = HelpFormatter - - def __init__( - self, - command: Command, - parent: Context | None = None, - info_name: str | None = None, - obj: t.Any | None = None, - auto_envvar_prefix: str | None = None, - default_map: cabc.MutableMapping[str, t.Any] | None = None, - terminal_width: int | None = None, - max_content_width: int | None = None, - resilient_parsing: bool = False, - allow_extra_args: bool | None = None, - allow_interspersed_args: bool | None = None, - ignore_unknown_options: bool | None = None, - help_option_names: list[str] | None = None, - token_normalize_func: t.Callable[[str], str] | None = None, - color: bool | None = None, - show_default: bool | None = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: dict[str, t.Any] = {} - #: the leftover arguments. - self.args: list[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self._protected_args: list[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. 
- self.obj: t.Any = obj - self._meta: dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. - if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: str | None = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: int | None = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). - self.max_content_width: int | None = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: list[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. 
- if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: str | None = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: bool | None = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: bool | None = show_default - - self._close_callbacks: list[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - @property - def protected_args(self) -> list[str]: - import warnings - - warnings.warn( - "'protected_args' is deprecated and will be removed in Click 9.0." - " 'args' will contain remaining unparsed tokens.", - DeprecationWarning, - stacklevel=2, - ) - return self._protected_args - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> Context: - self._depth += 1 - push_context(self) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> bool | None: - self._depth -= 1 - exit_result: bool | None = None - if self._depth == 0: - exit_result = self._close_with_exception_info(exc_type, exc_value, tb) - pop_context() - - return exit_result - - @contextmanager - def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. 
It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: AbstractContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. It calls the - resource's ``__enter__()`` method and returns the result. When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._close_with_exception_info(None, None, None) - - def _close_with_exception_info( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> bool | None: - """Unwind the exit stack by calling its :meth:`__exit__` providing the exception - information to allow for exception handling by the various resources registered - using :meth;`with_resource` - - :return: Whatever ``exit_stack.__exit__()`` returns. - """ - exit_result = self._exit_stack.__exit__(exc_type, exc_value, tb) - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - return exit_result - - @property - def command_path(self) -> str: - """The computed command path. 
This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. - """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> Context: - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: type[V]) -> V | None: - """Finds the closest object of a given type.""" - node: Context | None = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[False] = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def lookup_default(self, name: str, call: bool = True) -> t.Any | None: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - if self.default_map is not None: - value = self.default_map.get(name, UNSET) - - if call and callable(value): - return value() - - return value - - return UNSET - - def fail(self, message: str) -> t.NoReturn: - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> t.NoReturn: - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> t.NoReturn: - """Exits the application with a given exit code. - - .. versionchanged:: 8.2 - Callbacks and context managers registered with :meth:`call_on_close` - and :meth:`with_resource` are closed before exiting. - """ - self.close() - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: Command) -> Context: - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - @t.overload - def invoke( - self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> V: ... - - @t.overload - def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ... - - def invoke( - self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> t.Any | V: - """Invokes a command callback in exactly the way it expects. 
There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - - .. versionchanged:: 3.2 - A new context is created, and missing arguments use default values. - """ - if isinstance(callback, Command): - other_cmd = callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - callback = t.cast("t.Callable[..., V]", other_cmd.callback) - - ctx = self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - default_value = param.get_default(ctx) - # We explicitly hide the :attr:`UNSET` value to the user, as we - # choose to make it an implementation detail. And because ``invoke`` - # has been designed as part of Click public API, we return ``None`` - # instead. Refs: - # https://github.com/pallets/click/issues/3066 - # https://github.com/pallets/click/issues/3065 - # https://github.com/pallets/click/pull/3068 - if default_value is UNSET: - default_value = None - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, default_value - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = self - - with augment_usage_errors(self): - with ctx: - return callback(*args, **kwargs) - - def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. - if not isinstance(cmd, Command): - raise TypeError("Callback is not a command.") - - for param in self.params: - if param not in kwargs: - kwargs[param] = self.params[param] - - return self.invoke(cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> ParameterSource | None: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. 
- """ - return self._parameter_source.get(name) - - -class Command: - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. - :param deprecated: If ``True`` or non-empty string, issues a message - indicating that the command is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. - - .. versionchanged:: 8.2 - This is the base class for all commands, not ``BaseCommand``. - ``deprecated`` can be set to a string as well to customize the - deprecation message. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: type[Context] = Context - - #: the default for the :attr:`Context.allow_extra_args` flag. - allow_extra_args = False - - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: str | None, - context_settings: cabc.MutableMapping[str, t.Any] | None = None, - callback: t.Callable[..., t.Any] | None = None, - params: list[Parameter] | None = None, - help: str | None = None, - epilog: str | None = None, - short_help: str | None = None, - options_metavar: str | None = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool | str = False, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. 
- self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings - - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. - self.params: list[Parameter] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self._help_option = None - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - return { - "name": self.name, - "params": [param.to_info_dict() for param in self.get_params(ctx)], - "help": self.help, - "epilog": self.epilog, - "short_help": self.short_help, - "hidden": self.hidden, - "deprecated": self.deprecated, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. - """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> list[Parameter]: - params = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - params = [*params, help_option] - - if __debug__: - import warnings - - opts = [opt for param in params for opt in param.opts] - opts_counter = Counter(opts) - duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1) - - for duplicate_opt in duplicate_opts: - warnings.warn( - ( - f"The parameter {duplicate_opt} is used more than once. " - "Remove its duplicate as parameters should be unique." - ), - stacklevel=3, - ) - - return params - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. - """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. - """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> list[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> Option | None: - """Returns the help option object. - - Skipped if :attr:`add_help_option` is ``False``. - - .. versionchanged:: 8.1.8 - The help option is now cached to avoid creating it multiple times. - """ - help_option_names = self.get_help_option_names(ctx) - - if not help_option_names or not self.add_help_option: - return None - - # Cache the help option object in private _help_option attribute to - # avoid creating it multiple times. Not doing this will break the - # callback odering by iter_params_for_processing(), which relies on - # object comparison. 
- if self._help_option is None: - # Avoid circular import. - from .decorators import help_option - - # Apply help_option decorator and pop resulting option - help_option(*help_option_names)(self) - self._help_option = self.params.pop() # type: ignore[assignment] - - return self._help_option - - def make_parser(self, ctx: Context) -> _OptionParser: - """Creates the underlying option parser for this command.""" - parser = _OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. - """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. - - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - if self.help is not None: - # truncate the help text to the first form feed - text = inspect.cleandoc(self.help).partition("\f")[0] - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - if text: - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: Context | None = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. 
- - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. - """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class(self, info_name=info_name, parent=parent, **extra) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - _, args = param.handle_parse_result(ctx, opts, args) - - # We now have all parameters' values into `ctx.params`, but the data may contain - # the `UNSET` sentinel. - # Convert `UNSET` to `None` to ensure that the user doesn't see `UNSET`. - # - # Waiting until after the initial parse to convert allows us to treat `UNSET` - # more like a missing value when multiple params use the same name. - # Refs: - # https://github.com/pallets/click/issues/3071 - # https://github.com/pallets/click/pull/3079 - for name, value in ctx.params.items(): - if value is UNSET: - ctx.params[name] = None - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The command {name!r} is deprecated.{extra_message}" - ).format(name=self.name, extra_message=extra_message) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: list[CompletionItem] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, Group) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx._protected_args - ) - - return results - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: t.Literal[True] = True, - **extra: t.Any, - ) -> t.NoReturn: ... - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: ... - - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. 
- self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! - # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt) as e: - echo(file=sys.stderr) - raise Abort() from e - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str | None = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - - .. versionchanged:: 8.2.0 - Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). - """ - if complete_var is None: - complete_name = prog_name.replace("-", "_").replace(".", "_") - complete_var = f"_{complete_name}_COMPLETE".upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class _FakeSubclassCheck(type): - def __subclasscheck__(cls, subclass: type) -> bool: - return issubclass(subclass, cls.__bases__[0]) - - def __instancecheck__(cls, instance: t.Any) -> bool: - return isinstance(instance, cls.__bases__[0]) - - -class _BaseCommand(Command, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Command`` instead. - """ - - -class Group(Command): - """A group is a command that nests other commands (or more groups). - - :param name: The name of the group command. - :param commands: Map names to :class:`Command` objects. Can be a list, which - will use :attr:`Command.name` as the keys. - :param invoke_without_command: Invoke the group's callback even if a - subcommand is not given. - :param no_args_is_help: If no arguments are given, show the group's help and - exit. 
Defaults to the opposite of ``invoke_without_command``. - :param subcommand_metavar: How to represent the subcommand argument in help. - The default will represent whether ``chain`` is set or not. - :param chain: Allow passing more than one subcommand argument. After parsing - a command's arguments, if any arguments remain another command will be - matched, and so on. - :param result_callback: A function to call after the group's and - subcommand's callbacks. The value returned by the subcommand is passed. - If ``chain`` is enabled, the value will be a list of values returned by - all the commands. If ``invoke_without_command`` is enabled, the value - will be the value returned by the group's callback, or an empty list if - ``chain`` is enabled. - :param kwargs: Other arguments passed to :class:`Command`. - - .. versionchanged:: 8.0 - The ``commands`` argument can be a list of command objects. - - .. versionchanged:: 8.2 - Merged with and replaces the ``MultiCommand`` base class. - """ - - allow_extra_args = True - allow_interspersed_args = False - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: type[Command] | None = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. versionadded:: 8.0 - group_class: type[Group] | type[type] | None = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: str | None = None, - commands: cabc.MutableMapping[str, Command] - | cabc.Sequence[Command] - | None = None, - invoke_without_command: bool = False, - no_args_is_help: bool | None = None, - subcommand_metavar: str | None = None, - chain: bool = False, - result_callback: t.Callable[..., t.Any] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: cabc.MutableMapping[str, Command] = commands - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "A group in chain mode cannot have optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def add_command(self, cmd: Command, name: str | None = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_nested_chain(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'command(**kwargs)(callable)' to provide arguments." - ) - (func,) = args - args = () - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> Group: ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'group(**kwargs)(callable)' to provide arguments." 
- ) - (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> Group: - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - inner = old_callback(value, *args, **kwargs) - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv # type: ignore[return-value] - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - """Given a context and a command name, this returns a :class:`Command` - object if it exists or returns ``None``. - """ - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> list[str]: - """Returns a list of subcommand names in the order they should appear.""" - return sorted(self.commands) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx._protected_args = rest - ctx.args = [] - elif rest: - ctx._protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx._protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx._protected_args, *ctx.args] - ctx.args = [] - ctx._protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: list[str] - ) -> tuple[str | None, Command | None, list[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if _split_opt(cmd_name)[0]: - self.parse_args(ctx, args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class _MultiCommand(Group, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Group`` instead. - """ - - -class CommandCollection(Group): - """A :class:`Group` that looks up subcommands on other groups. If a command - is not found on this group, each registered source is checked in order. - Parameters on a source are not added to this group, and a source's callback - is not invoked when invoking its commands. In other words, this "flattens" - commands in many groups into this one group. - - :param name: The name of the group command. - :param sources: A list of :class:`Group` objects to look up commands from. - :param kwargs: Other arguments passed to :class:`Group`. - - .. versionchanged:: 8.2 - This is a subclass of ``Group``. Commands are looked up first on this - group, then each of its sources. - """ - - def __init__( - self, - name: str | None = None, - sources: list[Group] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - #: The list of registered groups. - self.sources: list[Group] = sources or [] - - def add_source(self, group: Group) -> None: - """Add a group as a source of commands.""" - self.sources.append(group) - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - rv = super().get_command(ctx, cmd_name) - - if rv is not None: - return rv - - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_nested_chain(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> list[str]: - rv: set[str] = set(super().list_commands(ctx)) - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. 
Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The latter is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: A function to further process or validate the value - after type conversion. It is called as ``f(ctx, param, value)`` - and must return the value. It is called for all sources, - including prompts. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of a single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). If ``nargs=-1``, all remaining - parameters are collected. - :param metavar: how the value is represented in the help page. - :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will invert the - order of processing. - :param envvar: environment variable(s) that are used to provide a default value for - this parameter. This can be a string or a sequence of strings. If a sequence is - given, only the first non-empty environment variable is used for the parameter. - :param shell_complete: A function that returns custom shell - completions. Used instead of the param's type completion if - given. Takes ``ctx, param, incomplete`` and must return a list - of :class:`~click.shell_completion.CompletionItem` or a list of - strings. - :param deprecated: If ``True`` or a non-empty string, issues a message - indicating that the argument is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. A deprecated parameter - cannot be required; a ValueError will be raised otherwise. - - .. versionchanged:: 8.2.0 - Introduction of ``deprecated``. - - .. versionchanged:: 8.2 - Adding duplicate parameter names to a :class:`~click.core.Command` will - result in a ``UserWarning`` being shown. - - .. versionchanged:: 8.0 - ``process_value`` validates required parameters and bounded - ``nargs``, and invokes the parameter callback before returning - the value. This allows the callback to validate prompts. - ``full_process_value`` is removed. - - .. versionchanged:: 8.0 - ``autocompletion`` is renamed to ``shell_complete`` and has new - semantics described above. The old name is deprecated and will - be removed in 8.1; until then it will be wrapped to match the - new requirements. - - .. versionchanged:: 8.0 - For ``multiple=True, nargs>1``, the default must be a list of - tuples. - - .. versionchanged:: 8.0 - Setting a default is no longer required for ``nargs>1``, it will - default to ``None``. ``multiple=True`` or ``nargs=-1`` will - default to ``()``. - - ..
versionchanged:: 7.1 - Empty environment variables are ignored rather than taking the - empty string value. This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - type: types.ParamType | t.Any | None = None, - required: bool = False, - # XXX The default historically embed two concepts: - # - the declaration of a Parameter object carrying the default (handy to - # arbitrage the default value of coupled Parameters sharing the same - # self.name, like flag options), - # - and the actual value of the default. - # It is confusing and is the source of many issues discussed in: - # https://github.com/pallets/click/pull/3030 - # In the future, we might think of splitting it in two, not unlike - # Option.is_flag and Option.flag_value: we could have something like - # Parameter.is_default and Parameter.default_value. - default: t.Any | t.Callable[[], t.Any] | None = UNSET, - callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None, - nargs: int | None = None, - multiple: bool = False, - metavar: str | None = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: str | cabc.Sequence[str] | None = None, - shell_complete: t.Callable[ - [Context, Parameter, str], list[CompletionItem] | list[str] - ] - | None = None, - deprecated: bool | str = False, - ) -> None: - self.name: str | None - self.opts: list[str] - self.secondary_opts: list[str] - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type: types.ParamType = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default: t.Any | t.Callable[[], t.Any] | None = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - self.deprecated = deprecated - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." - ) - - if required and deprecated: - raise ValueError( - f"The {self.param_type_name} '{self.human_readable_name}' " - "is deprecated and still required. A deprecated " - f"{self.param_type_name} cannot be required." - ) - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionchanged:: 8.3.0 - Returns ``None`` for the :attr:`default` if it was not set. - - .. 
versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - # We explicitly hide the :attr:`UNSET` value to the user, as we choose to - # make it an implementation detail. And because ``to_info_dict`` has been - # designed for documentation purposes, we return ``None`` instead. - "default": self.default if self.default is not UNSET else None, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(param=self, ctx=ctx) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Any | t.Callable[[], t.Any] | None: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is UNSET: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, t.Any] - ) -> tuple[t.Any, ParameterSource]: - """Returns the parameter value produced by the parser. - - If the parser did not produce a value from user input, the value is either - sourced from the environment variable, the default map, or the parameter's - default value. In that order of precedence. - - If no value is found, an internal sentinel value is returned. - - :meta private: - """ - # Collect from the parse the value passed by the user to the CLI. - value = opts.get(self.name, UNSET) # type: ignore - # If the value is set, it means it was sourced from the command line by the - # parser, otherwise it left unset by default. 
- source = ( - ParameterSource.COMMANDLINE - if value is not UNSET - else ParameterSource.DEFAULT - ) - - if value is UNSET: - envvar_value = self.value_from_envvar(ctx) - if envvar_value is not None: - value = envvar_value - source = ParameterSource.ENVIRONMENT - - if value is UNSET: - default_map_value = ctx.lookup_default(self.name) # type: ignore - if default_map_value is not UNSET: - value = default_map_value - source = ParameterSource.DEFAULT_MAP - - if value is UNSET: - default_value = self.get_default(ctx) - if default_value is not UNSET: - value = default_value - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the parameter's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - if self.multiple or self.nargs == -1: - return () - else: - return value - - def check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. - raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - # Define the conversion function based on nargs and type. - - if self.nargs == 1 or self.type.is_composite: - - def convert(value: t.Any) -> t.Any: - return self.type(value, param=self, ctx=ctx) - - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - """A value is considered missing if: - - - it is :attr:`UNSET`, - - or if it is an empty sequence while the parameter is suppose to have - non-single value (i.e. :attr:`nargs` is not ``1`` or :attr:`multiple` is - set). - - :meta private: - """ - if value is UNSET: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - """Process the value of this parameter: - - 1. Type cast the value using :meth:`type_cast_value`. - 2. Check if the value is missing (see: :meth:`value_is_missing`), and raise - :exc:`MissingParameter` if it is required. - 3. If a :attr:`callback` is set, call it to have the value replaced by the - result of the callback. If the value was not set, the callback receive - ``None``. This keep the legacy behavior as it was before the introduction of - the :attr:`UNSET` sentinel. 
- - :meta private: - """ - # shelter `type_cast_value` from ever seeing an `UNSET` value by handling the - # cases in which `UNSET` gets special treatment explicitly at this layer - # - # Refs: - # https://github.com/pallets/click/issues/3069 - if value is UNSET: - if self.multiple or self.nargs == -1: - value = () - else: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - # Legacy case: UNSET is not exposed directly to the callback, but converted - # to None. - if value is UNSET: - value = None - - # Search for parameters with UNSET values in the context. - unset_keys = {k: None for k, v in ctx.params.items() if v is UNSET} - # No UNSET values, call the callback as usual. - if not unset_keys: - value = self.callback(ctx, self, value) - - # Legacy case: provide a temporarily manipulated context to the callback - # to hide UNSET values as None. - # - # Refs: - # https://github.com/pallets/click/issues/3136 - # https://github.com/pallets/click/pull/3137 - else: - # Add another layer to the context stack to clearly hint that the - # context is temporarily modified. - with ctx: - # Update the context parameters to replace UNSET with None. - ctx.params.update(unset_keys) - # Feed these fake context parameters to the callback. - value = self.callback(ctx, self, value) - # Restore the UNSET values in the context parameters. - ctx.params.update( - { - k: UNSET - for k in unset_keys - # Only restore keys that are present and still None, in case - # the callback modified other parameters. - if k in ctx.params and ctx.params[k] is None - } - ) - - return value - - def resolve_envvar_value(self, ctx: Context) -> str | None: - """Returns the value found in the environment variable(s) attached to this - parameter. - - Environment variables values are `always returned as strings - `_. - - This method returns ``None`` if: - - - the :attr:`envvar` property is not set on the :class:`Parameter`, - - the environment variable is not found in the environment, - - the variable is found in the environment but its value is empty (i.e. the - environment variable is present but has an empty string). - - If :attr:`envvar` is setup with multiple environment variables, - then only the first non-empty value is returned. - - .. caution:: - - The raw value extracted from the environment is not normalized and is - returned as-is. Any normalization or reconciliation is performed later by - the :class:`Parameter`'s :attr:`type`. - - :meta private: - """ - if not self.envvar: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - # Return the first non-empty value of the list of environment variables. - if rv: - return rv - # Else, absence of value is interpreted as an environment variable that - # is not set, so proceed to the next one. - - return None - - def value_from_envvar(self, ctx: Context) -> str | cabc.Sequence[str] | None: - """Process the raw environment variable string for this parameter. - - Returns the string as-is or splits it into a sequence of strings if the - parameter is expecting multiple values (i.e. its :attr:`nargs` property is set - to a value other than ``1``). 
- - :meta private: - """ - rv = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - return self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str] - ) -> tuple[t.Any, list[str]]: - """Process the value produced by the parser from user input. - - Always process the value through the Parameter's :attr:`type`, wherever it - comes from. - - If the parameter is deprecated, this method warns the user about it, but only if - the value has been explicitly set by the user (and as such is not coming from - a default). - - :meta private: - """ - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - - ctx.set_parameter_source(self.name, source) # type: ignore - - # Display a deprecation warning if necessary. - if ( - self.deprecated - and value is not UNSET - and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - ): - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The {param_type} {name!r} is deprecated." - "{extra_message}" - ).format( - param_type=self.param_type_name, - name=self.human_readable_name, - extra_message=extra_message, - ) - echo(style(message, fg="red"), err=True) - - # Process the value through the parameter's type. - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - # In resilient parsing mode, we do not want to fail the command if the - # value is incompatible with the parameter type, so we reset the value - # to UNSET, which will be interpreted as a missing value. - value = UNSET - - # Add parameter's value to the context. - if ( - self.expose_value - # We skip adding the value if it was previously set by another parameter - # targeting the same variable name. This prevents parameters competing for - # the same name from overriding each other. - and (self.name not in ctx.params or ctx.params[self.name] is UNSET) - ): - # Click is logically enforcing that the name is None if the parameter is - # not to be exposed. We still assert it here to please the type checker. - assert self.name is not None, ( - f"{self!r} parameter's name should not be None when exposing value." - ) - ctx.params[self.name] = value - - return value, args - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - pass - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. - Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - ..
versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast("list[CompletionItem]", results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page and error messages. - Normally, environment variables are not shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. A deprecated option cannot be - prompted. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - :param attrs: Other command arguments described in :class:`Parameter`. - - .. versionchanged:: 8.2 - ``envvar`` used with ``flag_value`` will always use the ``flag_value``, - previously it would use the value of the environment variable. - - .. versionchanged:: 8.1 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. 
- """ - - param_type_name = "option" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - show_default: bool | str | None = None, - prompt: bool | str = False, - confirmation_prompt: bool | str = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: bool | None = None, - flag_value: t.Any = UNSET, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: types.ParamType | t.Any | None = None, - help: str | None = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - deprecated: bool | str = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - super().__init__( - param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs - ) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: str | None = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - if deprecated: - deprecated_message = ( - f"(DEPRECATED: {deprecated})" - if isinstance(deprecated, str) - else "(DEPRECATED)" - ) - help = help + deprecated_message if help is not None else deprecated_message - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # The _flag_needs_value property tells the parser that this option is a flag - # that cannot be used standalone and needs a value. With this information, the - # parser can determine whether to consider the next user-provided argument in - # the CLI as a value for this flag or as a new option. - # If prompt is enabled but not required, then it opens the possibility for the - # option to gets its value from the user. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - # Auto-detect if this is a flag or not. - if is_flag is None: - # Implicitly a flag because flag_value was set. - if flag_value is not UNSET: - is_flag = True - # Not a flag, but when used as a flag it shows a prompt. - elif self._flag_needs_value: - is_flag = False - # Implicitly a flag because secondary options names were given. - elif self.secondary_opts: - is_flag = True - # The option is explicitly not a flag. But we do not know yet if it needs a - # value or not. So we look at the default value to determine it. - elif is_flag is False and not self._flag_needs_value: - self._flag_needs_value = self.default is UNSET - - if is_flag: - # Set missing default for flags if not explicitly required or prompted. - if self.default is UNSET and not self.required and not self.prompt: - if multiple: - self.default = () - - # Auto-detect the type of the flag based on the flag_value. - if type is None: - # A flag without a flag_value is a boolean flag. - if flag_value is UNSET: - self.type: types.ParamType = types.BoolParamType() - # If the flag value is a boolean, use BoolParamType. - elif isinstance(flag_value, bool): - self.type = types.BoolParamType() - # Otherwise, guess the type from the flag value. - else: - self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = bool(is_flag) - self.is_bool_flag: bool = bool( - is_flag and isinstance(self.type, types.BoolParamType) - ) - self.flag_value: t.Any = flag_value - - # Set boolean flag default to False if unset and not required. 
- if self.is_bool_flag: - if self.default is UNSET and not self.required: - self.default = False - - # Support the special case of aligning the default value with the flag_value - # for flags whose default is explicitly set to True. Note that as long as we - # have this condition, there is no way a flag can have a default set to True, - # and a flag_value set to something else. Refs: - # https://github.com/pallets/click/issues/3024#issuecomment-3146199461 - # https://github.com/pallets/click/pull/3030/commits/06847da - if self.default is True and self.flag_value is not UNSET: - self.default = self.flag_value - - # Set the default flag_value if it is not set. - if self.flag_value is UNSET: - if self.is_flag: - self.flag_value = True - else: - self.flag_value = None - - # Counting. - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if self.default is UNSET: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if deprecated and prompt: - raise ValueError("`deprecated` options cannot use `prompt`.") - - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." - ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> dict[str, t.Any]: - """ - .. versionchanged:: 8.3.0 - Returns ``None`` for the :attr:`flag_value` if it was not set. - """ - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - # We explicitly hide the :attr:`UNSET` value to the user, as we choose to - # make it an implementation detail. And because ``to_info_dict`` has been - # designed for documentation purposes, we return ``None`` instead. - flag_value=self.flag_value if self.flag_value is not UNSET else None, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def get_error_hint(self, ctx: Context) -> str: - result = super().get_error_hint(ctx) - if self.show_envvar and self.envvar is not None: - result += f" (env var: '{self.envvar}')" - return result - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(_split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." 
- ) - else: - possible_names.append(_split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError( - f"Could not determine name for option with declarations {decls!r}" - ) - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" - ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: cabc.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar(ctx=ctx)}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - - extra = self.get_help_extra(ctx) - extra_items = [] - if "envvars" in extra: - extra_items.append( - _("env var: {var}").format(var=", ".join(extra["envvars"])) - ) - if "default" in extra: - extra_items.append(_("default: {default}").format(default=extra["default"])) - if "range" in extra: - extra_items.append(extra["range"]) - if "required" in extra: - extra_items.append(_(extra["required"])) - - if extra_items: - extra_str = "; ".join(extra_items) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra: - extra: types.OptionHelpExtra = {} - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - if isinstance(envvar, str): - extra["envvars"] = (envvar,) - else: - extra["envvars"] = tuple(str(d) for d in envvar) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. 
- resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or ( - show_default and (default_value not in (None, UNSET)) - ): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif isinstance(default_value, enum.Enum): - default_string = default_value.name - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. - default_string = _split_opt( - (self.opts if default_value else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - elif default_value == "": - default_string = '""' - else: - default_string = str(default_value) - - if default_string: - extra["default"] = default_string - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra["range"] = range_str - - if self.required: - extra["required"] = "required" - - return extra - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to lock in the value before - # attempting any user interaction. - default = self.get_default(ctx) - - # A boolean flag can use a simplified [y/n] confirmation prompt. - if self.is_bool_flag: - # If we have no boolean default, we force the user to explicitly provide - # one. - if default in (UNSET, None): - default = None - # Nothing prevent you to declare an option that is simultaneously: - # 1) auto-detected as a boolean flag, - # 2) allowed to prompt, and - # 3) still declare a non-boolean default. - # This forced casting into a boolean is necessary to align any non-boolean - # default to the prompt, which is going to be a [y/n]-style confirmation - # because the option is still a boolean flag. That way, instead of [y/n], - # we get [Y/n] or [y/N] depending on the truthy value of the default. - # Refs: https://github.com/pallets/click/pull/3030#discussion_r2289180249 - else: - default = bool(default) - return confirm(self.prompt, default) - - # If show_default is set to True/False, provide this to `prompt` as well. For - # non-bool values of `show_default`, we use `prompt`'s default behavior - prompt_kwargs: t.Any = {} - if isinstance(self.show_default, bool): - prompt_kwargs["show_default"] = self.show_default - - return prompt( - self.prompt, - # Use ``None`` to inform the prompt() function to reiterate until a valid - # value is provided by the user if we have no default. 
- default=None if default is UNSET else default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - **prompt_kwargs, - ) - - def resolve_envvar_value(self, ctx: Context) -> str | None: - """:class:`Option` resolves its environment variable the same way as - :func:`Parameter.resolve_envvar_value`, but it also supports - :attr:`Context.auto_envvar_prefix`. If we could not find an environment variable from - the :attr:`envvar` property, we fall back on :attr:`Context.auto_envvar_prefix` - to dynamically build the environment variable name using the - :python:`{ctx.auto_envvar_prefix}_{self.name.upper()}` template. - - :meta private: - """ - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Any: - """For :class:`Option`, this method processes the raw environment variable - string the same way as :func:`Parameter.value_from_envvar` does. - - But in the case of non-boolean flags, the value is analyzed to determine if the - flag is activated or not, and the method returns a boolean indicating its activation, or the - :attr:`flag_value` if the latter is set. - - This method also takes care of repeated options (i.e. options with - :attr:`multiple` set to ``True``). - - :meta private: - """ - rv = self.resolve_envvar_value(ctx) - - # An absent environment variable or an empty string is interpreted as unset. - if rv is None: - return None - - # Non-boolean flags are more liberal in what they accept. But a flag being a - # flag, its envvar value still needs to be analyzed to determine if the flag is - # activated or not. - if self.is_flag and not self.is_bool_flag: - # If the flag_value is set and matches the envvar value, return it - # directly. - if self.flag_value is not UNSET and rv == self.flag_value: - return self.flag_value - # Analyze the envvar value as a boolean to know if the flag is - # activated or not. - return types.BoolParamType.str_to_bool(rv) - - # Split the envvar value if it is allowed to be repeated. - value_depth = (self.nargs != 1) + bool(self.multiple) - if value_depth > 0: - multi_rv = self.type.split_envvar_value(rv) - if self.multiple and self.nargs != 1: - multi_rv = batch(multi_rv, self.nargs) # type: ignore[assignment] - - return multi_rv - - return rv - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, Parameter] - ) -> tuple[t.Any, ParameterSource]: - """For :class:`Option`, the value can be collected from an interactive prompt - if the option is a flag that needs a value (and the :attr:`prompt` property is - set). - - Additionally, this method handles flag options that are activated without a - value, in which case the :attr:`flag_value` is returned. - - :meta private: - """ - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option is allowed to be used as a flag - # without a value. - if value is FLAG_NEEDS_VALUE: - # If the option allows for a prompt, we start an interaction with the user. - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - # Else the flag takes its flag_value as value.
- else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - # A flag which is activated always returns the flag value, unless the value - # comes from the explicitly sets default. - elif ( - self.is_flag - and value is True - and not self.is_bool_flag - and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - ): - value = self.flag_value - - # Re-interpret a multiple option which has been sent as-is by the parser. - # Here we replace each occurrence of value-less flags (marked by the - # FLAG_NEEDS_VALUE sentinel) with the flag_value. - elif ( - self.multiple - and value is not UNSET - and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - and any(v is FLAG_NEEDS_VALUE for v in value) - ): - value = [self.flag_value if v is FLAG_NEEDS_VALUE else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt for one to the user - # if prompting is enabled. - elif ( - ( - value is UNSET - or source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) - ) - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - # process_value has to be overridden on Options in order to capture - # `value == UNSET` cases before `type_cast_value()` gets called. - # - # Refs: - # https://github.com/pallets/click/issues/3069 - if self.is_flag and not self.required and self.is_bool_flag and value is UNSET: - value = False - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - # in the normal case, rely on Parameter.process_value - return super().process_value(ctx, value) - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the constructor of :class:`Parameter`. - """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: cabc.Sequence[str], - required: bool | None = None, - **attrs: t.Any, - ) -> None: - # Auto-detect the requirement status of the argument if not explicitly set. - if required is None: - # The argument gets automatically required if it has no explicit default - # value set and is setup to match at least one value. - if attrs.get("default", UNSET) is UNSET: - required = attrs.get("nargs", 1) > 0 - # If the argument has a default value, it is not required. - else: - required = False - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(param=self, ctx=ctx) - if not var: - var = self.name.upper() # type: ignore - if self.deprecated: - var += "!" - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." 
- return var - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Argument is marked as exposed, but does not have a name.") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}: {decls}." - ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [self.make_metavar(ctx)] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar(ctx)}'" - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/click/decorators.py b/.venv/lib/python3.12/site-packages/click/decorators.py deleted file mode 100644 index 21f4c34..0000000 --- a/.venv/lib/python3.12/site-packages/click/decorators.py +++ /dev/null @@ -1,551 +0,0 @@ -from __future__ import annotations - -import inspect -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") -T = t.TypeVar("T") -_AnyCallable = t.Callable[..., t.Any] -FC = t.TypeVar("FC", bound="_AnyCallable | Command") - - -def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]: - """Marks a callback as wanting to receive the current context - object as first argument. - """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(new_func, f) - - -def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - -def make_pass_decorator( - object_type: type[T], ensure: bool = False -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. 
- - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - - obj: T | None - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." - ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - return decorator - - -def pass_meta_key( - key: str, *, doc_description: str | None = None -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -# variant: no call, directly as decorator for a function. -@t.overload -def command(name: _AnyCallable) -> Command: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) -@t.overload -def command( - name: str | None, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... - - -# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) -@t.overload -def command( - name: None = None, - *, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def command( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Command]: ... - - -def command( - name: str | _AnyCallable | None = None, - cls: type[CmdType] | None = None, - **attrs: t.Any, -) -> Command | t.Callable[[_AnyCallable], Command | CmdType]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. 
- - The name of the command defaults to the name of the function, converted to - lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes - ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example, - ``init_data_command`` becomes ``init-data``. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: The name of the command. Defaults to modifying the function's - name as described above. - :param cls: The command class to create. Defaults to :class:`Command`. - - .. versionchanged:: 8.2 - The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are - removed when generating the name. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. - """ - - func: t.Callable[[_AnyCallable], t.Any] | None = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = t.cast("type[CmdType]", Command) - - def decorator(f: _AnyCallable) -> CmdType: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - if t.TYPE_CHECKING: - assert cls is not None - assert not callable(name) - - if name is not None: - cmd_name = name - else: - cmd_name = f.__name__.lower().replace("_", "-") - cmd_left, sep, suffix = cmd_name.rpartition("-") - - if sep and suffix in {"command", "cmd", "group", "grp"}: - cmd_name = cmd_left - - cmd = cls(name=cmd_name, callback=f, params=params, **attrs) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -GrpType = t.TypeVar("GrpType", bound=Group) - - -# variant: no call, directly as decorator for a function. -@t.overload -def group(name: _AnyCallable) -> Group: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) -@t.overload -def group( - name: str | None, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) -@t.overload -def group( - name: None = None, - *, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def group( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Group]: ... 
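A short sketch of the naming rules described in that docstring; the suffix stripping assumes the Click 8.2 behaviour noted in the changelog entry::

    import click

    @click.command()  # "_command" is stripped and "_" becomes "-": name is "init-data"
    def init_data_command():
        """Initialise the data store."""

    @click.command("load")  # an explicit name always wins over the derived one
    def load_cmd():
        pass

    assert init_data_command.name == "init-data"
    assert load_cmd.name == "load"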
- - -def group( - name: str | _AnyCallable | None = None, - cls: type[GrpType] | None = None, - **attrs: t.Any, -) -> Group | t.Callable[[_AnyCallable], Group | GrpType]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if cls is None: - cls = t.cast("type[GrpType]", Group) - - if callable(name): - return command(cls=cls, **attrs)(name) - - return command(name, cls, **attrs) - - -def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument( - *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default argument class, refer to :class:`Argument` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Argument - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def option( - *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default option class, refer to :class:`Option` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Option - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
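A usage sketch combining the ``argument`` and ``option`` decorators defined above::

    import click

    @click.command()
    @click.argument("src", type=click.Path(exists=True))
    @click.option("--count", "-c", default=1, show_default=True,
                  help="Number of copies to make.")
    def copy(src, count):
        """Copy SRC the given number of times."""
        for _ in range(count):
            click.echo(f"copying {src}")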
- """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: str | None = None, - *param_decls: str, - package_name: str | None = None, - prog_name: str | None = None, - message: str | None = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. 
- """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - import importlib.metadata - - try: - version = importlib.metadata.version(package_name) - except importlib.metadata.PackageNotFoundError: - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - message % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Pre-configured ``--help`` option which immediately prints the help page - and exits the program. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def show_help(ctx: Context, param: Parameter, value: bool) -> None: - """Callback that print the help page on ```` and exits.""" - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs.setdefault("callback", show_help) - - return option(*param_decls, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/click/exceptions.py b/.venv/lib/python3.12/site-packages/click/exceptions.py deleted file mode 100644 index 4d782ee..0000000 --- a/.venv/lib/python3.12/site-packages/click/exceptions.py +++ /dev/null @@ -1,308 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .globals import resolve_color_default -from .utils import echo -from .utils import format_filename - -if t.TYPE_CHECKING: - from .core import Command - from .core import Context - from .core import Parameter - - -def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - # The context will be removed by the time we print the message, so cache - # the color settings here to be used later on (in `show`) - self.show_color: bool | None = resolve_color_default() - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=self.show_color, - ) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: Context | None = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd: Command | None = self.ctx.command if self.ctx else None - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. 
This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: cabc.Sequence[str] | str | None = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: str | None = None, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: cabc.Sequence[str] | str | None = None, - param_type: str | None = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: cabc.Sequence[str] | str | None = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message( - param=self.param, ctx=self.ctx - ) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
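A sketch of the usual way ``BadParameter`` is raised from a parameter callback, so Click can attach the parameter context described above::

    import click

    def validate_port(ctx, param, value):
        if not 0 < value < 65536:
            # Click prefixes this with "Invalid value for '--port':".
            raise click.BadParameter("must be between 1 and 65535")
        return value

    @click.command()
    @click.option("--port", type=int, default=8080, callback=validate_port)
    def serve(port):
        click.echo(f"listening on {port}")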
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: str | None = None, - possibilities: cabc.Sequence[str] | None = None, - ctx: Context | None = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: Context | None = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class NoArgsIsHelpError(UsageError): - def __init__(self, ctx: Context) -> None: - self.ctx: Context - super().__init__(ctx.get_help(), ctx=ctx) - - def show(self, file: t.IO[t.Any] | None = None) -> None: - echo(self.format_message(), file=file, err=True, color=self.ctx.color) - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: str | None = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename: str = format_filename(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code: int = code diff --git a/.venv/lib/python3.12/site-packages/click/formatting.py b/.venv/lib/python3.12/site-packages/click/formatting.py deleted file mode 100644 index 0b64f83..0000000 --- a/.venv/lib/python3.12/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import _split_opt - -# Can force a width. 
This is used by the test system -FORCED_WIDTH: int | None = None - - -def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]: - widths: dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: cabc.Iterable[tuple[str, str]], col_count: int -) -> cabc.Iterator[tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: list[tuple[int, bool, str]] = [] - buf: list[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. 
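A sketch of the ``\b`` escape that ``wrap_text`` honours when ``preserve_paragraphs`` is set; in a command's help text it keeps a block from being rewrapped::

    import click

    @click.command()
    def deploy():
        """Deploy the site.

        \b
        Steps performed:
          1. build the assets
          2. upload them
          3. flush the caches
        """
        click.echo("deployed")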
- """ - - def __init__( - self, - indent_increment: int = 2, - width: int | None = None, - max_width: int | None = None, - ) -> None: - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - import shutil - - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent: int = 0 - self.buffer: list[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. - """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: cabc.Sequence[tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. 
- """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> cabc.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. - """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> cabc.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = _split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/.venv/lib/python3.12/site-packages/click/globals.py b/.venv/lib/python3.12/site-packages/click/globals.py deleted file mode 100644 index a2f9172..0000000 --- a/.venv/lib/python3.12/site-packages/click/globals.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import annotations - -import typing as t -from threading import local - -if t.TYPE_CHECKING: - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: t.Literal[False] = False) -> Context: ... - - -@t.overload -def get_current_context(silent: bool = ...) -> Context | None: ... - - -def get_current_context(silent: bool = False) -> Context | None: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. 
- """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: Context) -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: bool | None = None) -> bool | None: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. - """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/.venv/lib/python3.12/site-packages/click/parser.py b/.venv/lib/python3.12/site-packages/click/parser.py deleted file mode 100644 index 1ea1f71..0000000 --- a/.venv/lib/python3.12/site-packages/click/parser.py +++ /dev/null @@ -1,532 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" - -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from ._utils import FLAG_NEEDS_VALUE -from ._utils import UNSET -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - from ._utils import T_FLAG_NEEDS_VALUE - from ._utils import T_UNSET - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - - -def _unpack_args( - args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int] -) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with ``UNSET``. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: list[str | tuple[str | T_UNSET, ...] 
| T_UNSET] = [] - spos: int | None = None - - def _fetch(c: deque[V]) -> V | T_UNSET: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return UNSET - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) # type: ignore[arg-type] - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(UNSET) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def _split_opt(opt: str) -> tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def _normalize_opt(opt: str, ctx: Context | None) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = _split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -class _Option: - def __init__( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes: set[str] = set() - - for opt in opts: - prefix, value = _split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: t.Any, state: _ParsingState) -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class _Argument: - def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: str | cabc.Sequence[str | None] | None | T_UNSET, - state: _ParsingState, - ) -> None: - if self.nargs > 1: - assert isinstance(value, cabc.Sequence) - holes = sum(1 for x in value if x is UNSET) - if holes == len(value): - value = UNSET - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - # We failed to collect any argument value so we consider the argument as unset. 
- if value == (): - value = UNSET - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class _ParsingState: - def __init__(self, rargs: list[str]) -> None: - self.opts: dict[str, t.Any] = {} - self.largs: list[str] = [] - self.rargs = rargs - self.order: list[CoreParameter] = [] - - -class _OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - - .. deprecated:: 8.2 - Will be removed in Click 9.0. - """ - - def __init__(self, ctx: Context | None = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args: bool = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options: bool = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: dict[str, _Option] = {} - self._long_opt: dict[str, _Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: list[_Argument] = [] - - def add_option( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [_normalize_opt(opt, self.ctx) for opt in opts] - option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - self._args.append(_Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: list[str] - ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. 
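A small sketch of the deprecated ``_OptionParser`` API described above, assuming the Click 8.2 module layout shown in this file::

    import click
    from click.parser import _OptionParser

    name_opt = click.Option(["--name"])
    parser = _OptionParser()
    parser.add_option(name_opt, ["--name"], dest="name", action="store", nargs=1)

    values, leftovers, order = parser.parse_args(["--name", "world", "extra"])
    assert values == {"name": "world"}
    assert leftovers == ["extra"]
    assert order == [name_opt]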
If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = _ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: _ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: _ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: str | None, state: _ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = UNSET - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: _ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = _normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. 
- if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = UNSET - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we recombine the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: _Option, state: _ParsingState - ) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE: - nargs = option.nargs - - value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = FLAG_NEEDS_VALUE - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = FLAG_NEEDS_VALUE - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: _ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = _normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. - if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) - - -def __getattr__(name: str) -> object: - import warnings - - if name in { - "OptionParser", - "Argument", - "Option", - "split_opt", - "normalize_opt", - "ParsingState", - }: - warnings.warn( - f"'parser.{name}' is deprecated and will be removed in Click 9.0." 
- " The old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return globals()[f"_{name}"] - - if name == "split_arg_string": - from .shell_completion import split_arg_string - - warnings.warn( - "Importing 'parser.split_arg_string' is deprecated, it will only be" - " available in 'shell_completion' in Click 9.0.", - DeprecationWarning, - stacklevel=2, - ) - return split_arg_string - - raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/click/py.typed b/.venv/lib/python3.12/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/lib/python3.12/site-packages/click/shell_completion.py b/.venv/lib/python3.12/site-packages/click/shell_completion.py deleted file mode 100644 index 8f1564c..0000000 --- a/.venv/lib/python3.12/site-packages/click/shell_completion.py +++ /dev/null @@ -1,667 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .utils import echo - - -def shell_complete( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: str | None = None, - **kwargs: t.Any, - ) -> None: - self.value: t.Any = value - self.type: str = type - self.help: str | None = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -# See ZshComplete.format_completion below, and issue #2703, before -# changing this script. -# -# (TL;DR: _describe is picky about the format, but this Zsh script snippet -# is already widely deployed. So freeze this script, and use clever-ish -# handling of colons in ZshComplet.format_completion.) -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -if [[ $zsh_eval_context[-1] == loadautofunc ]]; then - # autoload from fpath, call function directly - %(complete_func)s "$@" -else - # eval/source/. command, register function for later - compdef %(complete_func)s %(prog_name)s -fi -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. 
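A sketch of the user-facing side of this machinery: a parameter-level ``shell_complete`` callback that returns ``CompletionItem`` objects (the environment names are made up)::

    import click
    from click.shell_completion import CompletionItem

    def complete_env(ctx, param, incomplete):
        envs = ["dev", "staging", "prod"]
        return [
            CompletionItem(name, help=f"deploy to {name}")
            for name in envs
            if name.startswith(incomplete)
        ]

    @click.command()
    @click.argument("env", shell_complete=complete_env)
    def deploy(env):
        click.echo(f"deploying to {env}")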
- """ - - def __init__( - self, - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. - """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> tuple[list[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - @staticmethod - def _check_version() -> None: - import shutil - import subprocess - - bash_exe = shutil.which("bash") - - if bash_exe is None: - match = None - else: - output = subprocess.run( - [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], - stdout=subprocess.PIPE, - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - echo( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ), - err=True, - ) - else: - echo( - _("Couldn't detect Bash version, shell completion is not supported."), - err=True, - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - help_ = item.help or "_" - # The zsh completion script uses `_describe` on items with help - # texts (which splits the item help from the item value at the - # first unescaped colon) and `compadd` on items without help - # text (which uses the item value as-is and does not support - # colon escaping). So escape colons in the item value if and - # only if the item help is not the sentinel "_" value, as used - # by the completion script. - # - # (The zsh completion script is potentially widely deployed, and - # thus harder to fix than this method.) - # - # See issue #1812 and issue #2703 for further context. - value = item.value.replace(":", r"\:") if help_ != "_" else item.value - return f"{item.type}\n{value}\n{help_}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - if incomplete: - incomplete = split_arg_string(incomplete)[0] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]") - - -_available_shells: dict[str, type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: ShellCompleteType, name: str | None = None -) -> ShellCompleteType: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - return cls - - -def get_completion_class(shell: str) -> type[ShellComplete] | None: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. 
- - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def split_arg_string(string: str) -> list[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - - .. versionchanged:: 8.2 - Moved to ``shell_completion`` from ``parser``. - """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. - """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - # Will be None if expose_value is False. - value = ctx.params.get(param.name) - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - break - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - args: list[str], -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. 
- """ - ctx_args["resilient_parsing"] = True - with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx: - args = ctx._protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, Group): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, args, parent=ctx, resilient_parsing=True - ) as sub_ctx: - ctx = sub_ctx - args = ctx._protected_args + ctx.args - else: - sub_ctx = ctx - - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) as sub_sub_ctx: - sub_ctx = sub_sub_ctx - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx._protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: list[str], incomplete: str -) -> tuple[Command | Parameter, str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. - - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. 
- return ctx.command, incomplete diff --git a/.venv/lib/python3.12/site-packages/click/termui.py b/.venv/lib/python3.12/site-packages/click/termui.py deleted file mode 100644 index 2e98a07..0000000 --- a/.venv/lib/python3.12/site-packages/click/termui.py +++ /dev/null @@ -1,883 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import inspect -import io -import itertools -import sys -import typing as t -from contextlib import AbstractContextManager -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Any | None = None, - show_choices: bool = True, - type: ParamType | None = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name - - return default - - -def prompt( - text: str, - default: t.Any | None = None, - hide_input: bool = False, - confirmation_prompt: bool | str = False, - type: ParamType | t.Any | None = None, - value_proc: t.Callable[[str], t.Any] | None = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. 
- :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionchanged:: 8.3.1 - A space is no longer appended to the prompt. - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text[:-1], nl=False, err=err) - # Echo the last character to stdout to work around an issue where - # readline causes backspace to clear the whole line. - return f(text[-1:]) - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: bool | None = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.3.1 - A space is no longer appended to the prompt. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. 
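A short usage sketch for ``prompt`` and ``confirm`` as documented above (the command and values are made up)::

    import click

    @click.command()
    def deploy():
        # Re-prompts until the value converts cleanly to the given type.
        port = click.prompt("Port", default=8080, type=int)
        # abort=True raises Abort, ending the command, on a negative answer.
        click.confirm(f"Start the server on port {port}?", abort=True)
        click.echo("starting...")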
- """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt[:-1], nl=False, err=err) - # Echo the last character to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(prompt[-1:]).lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str, - color: bool | None = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast("cabc.Iterable[str]", text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -@t.overload -def progressbar( - *, - length: int, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[int]: ... - - -@t.overload -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: ... 
- - -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param hidden: hide the progressbar. Defaults to ``False``. When no tty is - detected, it will only print the progressbar label. Setting this to - ``False`` also disables that. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. 
- :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionadded:: 8.2 - The ``hidden`` argument. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - The ``update_min_steps`` parameter. - - .. versionadded:: 4.0 - The ``color`` parameter and ``update`` method. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - hidden=hidden, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. 
versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - - # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor - echo("\033[2J\033[1;1H", nl=False) - - -def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: int | tuple[int, int, int] | str | None = None, - bg: int | tuple[int, int, int] | str | None = None, - bold: bool | None = None, - dim: bool | None = None, - underline: bool | None = None, - overline: bool | None = None, - italic: bool | None = None, - blink: bool | None = None, - reverse: bool | None = None, - strikethrough: bool | None = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. - - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. 
versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Any | None = None, - file: t.IO[t.AnyStr] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -@t.overload -def edit( - text: bytes | bytearray, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = False, - extension: str = ".txt", -) -> bytes | None: ... - - -@t.overload -def edit( - text: str, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", -) -> str | None: ... - - -@t.overload -def edit( - text: None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> None: ... - - -def edit( - text: str | bytes | bytearray | None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> str | bytes | bytearray | None: - r"""Edits the given text in the defined editor. 
If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. If the editor supports - editing multiple files at once, a sequence of files may be - passed as well. Invoke `click.file` once per file instead - if multiple files cannot be managed at once or editing the - files serially is desired. - - .. versionchanged:: 8.2.0 - ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str`` - if the ``editor`` supports editing multiple files at once. - - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - if isinstance(filename, str): - filename = (filename,) - - ed.edit_files(filenames=filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. -_getchar: t.Callable[[bool], str] | None = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. 
The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> AbstractContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: str | None = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/.venv/lib/python3.12/site-packages/click/testing.py b/.venv/lib/python3.12/site-packages/click/testing.py deleted file mode 100644 index f6f60b8..0000000 --- a/.venv/lib/python3.12/site-packages/click/testing.py +++ /dev/null @@ -1,577 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import io -import os -import shlex -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import _compat -from . import formatting -from . import termui -from . 
import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from _typeshed import ReadableBuffer - - from .core import Command - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> list[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> cabc.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class BytesIOCopy(io.BytesIO): - """Patch ``io.BytesIO`` to let the written stream be copied to another. - - .. versionadded:: 8.2 - """ - - def __init__(self, copy_to: io.BytesIO) -> None: - super().__init__() - self.copy_to = copy_to - - def flush(self) -> None: - super().flush() - self.copy_to.flush() - - def write(self, b: ReadableBuffer) -> int: - self.copy_to.write(b) - return super().write(b) - - -class StreamMixer: - """Mixes `<stdout>` and `<stderr>` streams. - - The result is available in the ``output`` attribute. - - .. versionadded:: 8.2 - """ - - def __init__(self) -> None: - self.output: io.BytesIO = io.BytesIO() - self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output) - self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output) - - def __del__(self) -> None: - """ - Guarantee that embedded file-like objects are closed in a - predictable order, protecting against races between - self.output being closed and other streams being flushed on close - - .. versionadded:: 8.2.2 - """ - self.stderr.close() - self.stdout.close() - self.output.close() - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - -def make_input_stream( - input: str | bytes | t.IO[t.Any] | None, charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast("t.IO[t.Any]", input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(input) - - -class Result: - """Holds the captured result of an invoked CLI script. - - :param runner: The runner that created the result - :param stdout_bytes: The standard output as bytes. - :param stderr_bytes: The standard error as bytes. - :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the - user would see it in its terminal. - :param return_value: The value returned from the invoked command. - :param exit_code: The exit code as integer.
- :param exception: The exception that happened if one did. - :param exc_info: Exception information (exception type, exception instance, - traceback type). - - .. versionchanged:: 8.2 - ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and - ``mix_stderr`` has been removed. - - .. versionadded:: 8.0 - Added ``return_value``. - """ - - def __init__( - self, - runner: CliRunner, - stdout_bytes: bytes, - stderr_bytes: bytes, - output_bytes: bytes, - return_value: t.Any, - exit_code: int, - exception: BaseException | None, - exc_info: tuple[type[BaseException], BaseException, TracebackType] - | None = None, - ): - self.runner = runner - self.stdout_bytes = stdout_bytes - self.stderr_bytes = stderr_bytes - self.output_bytes = output_bytes - self.return_value = return_value - self.exit_code = exit_code - self.exception = exception - self.exc_info = exc_info - - @property - def output(self) -> str: - """The terminal output as unicode string, as the user would see it. - - .. versionchanged:: 8.2 - No longer a proxy for ``self.stdout``. Now has its own independent stream - that is mixing `<stdout>` and `<stderr>`, in the order they were written. - """ - return self.output_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string. - - .. versionchanged:: 8.2 - No longer raise an exception, always returns the `<stderr>` string. - """ - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from `<stdin>` writes - to `<stdout>`. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param catch_exceptions: Whether to catch any exceptions other than - ``SystemExit`` when running :meth:`~CliRunner.invoke`. - - .. versionchanged:: 8.2 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 8.2 - ``mix_stderr`` parameter has been removed. - """ - - def __init__( - self, - charset: str = "utf-8", - env: cabc.Mapping[str, str | None] | None = None, - echo_stdin: bool = False, - catch_exceptions: bool = True, - ) -> None: - self.charset = charset - self.env: cabc.Mapping[str, str | None] = env or {} - self.echo_stdin = echo_stdin - self.catch_exceptions = catch_exceptions - - def get_default_prog_name(self, cli: Command) -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set.
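A minimal sketch of how ``CliRunner`` and ``Result`` are typically used together (the command under test is made up)::

    import click
    from click.testing import CliRunner

    @click.command()
    @click.option("--name", default="world")
    def hello(name):
        click.echo(f"Hello {name}!")

    runner = CliRunner()
    result = runner.invoke(hello, ["--name", "Click"])
    assert result.exit_code == 0
    assert result.output == "Hello Click!\n"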
- """ - return cli.name or "root" - - def make_env( - self, overrides: cabc.Mapping[str, str | None] | None = None - ) -> cabc.Mapping[str, str | None]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - color: bool = False, - ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up `` with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into `sys.stdin`. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - An additional output stream is returned, which is a mix of - `` and `` streams. - - .. versionchanged:: 8.2 - Always returns the `` stream. - - .. versionchanged:: 8.0 - `` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - stream_mixer = StreamMixer() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early. 
- text_input._CHUNK_SIZE = 1 # type: ignore - - sys.stdout = _NamedTextIOWrapper( - stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w" - ) - - sys.stderr = _NamedTextIOWrapper( - stream_mixer.stderr, - encoding=self.charset, - name="<stderr>", - mode="w", - errors="backslashreplace", - ) - - @_pause_echo(echo_input) # type: ignore - def visible_input(prompt: str | None = None) -> str: - sys.stdout.write(prompt or "") - try: - val = next(text_input).rstrip("\r\n") - except StopIteration as e: - raise EOFError() from e - sys.stdout.write(f"{val}\n") - sys.stdout.flush() - return val - - @_pause_echo(echo_input) # type: ignore - def hidden_input(prompt: str | None = None) -> str: - sys.stdout.write(f"{prompt or ''}\n") - sys.stdout.flush() - try: - return next(text_input).rstrip("\r\n") - except StopIteration as e: - raise EOFError() from e - - @_pause_echo(echo_input) # type: ignore - def _getchar(echo: bool) -> str: - char = sys.stdin.read(1) - - if echo: - sys.stdout.write(char) - - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi( - stream: t.IO[t.Any] | None = None, color: bool | None = None - ) -> bool: - if color is None: - return not default_color - return not color - - old_visible_prompt_func = termui.visible_prompt_func - old_hidden_prompt_func = termui.hidden_prompt_func - old__getchar_func = termui._getchar - old_should_strip_ansi = utils.should_strip_ansi # type: ignore - old__compat_should_strip_ansi = _compat.should_strip_ansi - termui.visible_prompt_func = visible_input - termui.hidden_prompt_func = hidden_input - termui._getchar = _getchar - utils.should_strip_ansi = should_strip_ansi # type: ignore - _compat.should_strip_ansi = should_strip_ansi - - old_env = {} - try: - for key, value in env.items(): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output) - finally: - for key, value in old_env.items(): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - termui.visible_prompt_func = old_visible_prompt_func - termui.hidden_prompt_func = old_hidden_prompt_func - termui._getchar = old__getchar_func - utils.should_strip_ansi = old_should_strip_ansi # type: ignore - _compat.should_strip_ansi = old__compat_should_strip_ansi - formatting.FORCED_WIDTH = old_forced_width - - def invoke( - self, - cli: Command, - args: str | cabc.Sequence[str] | None = None, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - catch_exceptions: bool | None = None, - color: bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. - - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``.
If :data:`None`, the value - from :class:`CliRunner` is used. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - The result object has the ``output_bytes`` attribute with - the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would - see it in its terminal. - - .. versionchanged:: 8.2 - The result object always returns the ``stderr_bytes`` stream. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - if catch_exceptions is None: - catch_exceptions = self.catch_exceptions - - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: BaseException | None = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast("int | t.Any | None", e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - sys.stderr.flush() - stdout = outstreams[0].getvalue() - stderr = outstreams[1].getvalue() - output = outstreams[2].getvalue() - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - output_bytes=output, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: str | os.PathLike[str] | None = None - ) -> cabc.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
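A sketch of ``isolated_filesystem`` in use (the command and file names are made up)::

    import click
    from click.testing import CliRunner

    @click.command()
    @click.argument("path", type=click.Path(exists=True))
    def show(path):
        with open(path) as f:
            click.echo(f.read(), nl=False)

    runner = CliRunner()
    with runner.isolated_filesystem() as tmp_dir:
        # The CWD is now a fresh temporary directory; it is removed again
        # on exit because no temp_dir argument was given.
        with open("notes.txt", "w") as f:
            f.write("hi\n")
        result = runner.invoke(show, ["notes.txt"])
        assert result.output == "hi\n"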
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) - os.chdir(dt) - - try: - yield dt - finally: - os.chdir(cwd) - - if temp_dir is None: - import shutil - - try: - shutil.rmtree(dt) - except OSError: - pass diff --git a/.venv/lib/python3.12/site-packages/click/types.py b/.venv/lib/python3.12/site-packages/click/types.py deleted file mode 100644 index e71c1c2..0000000 --- a/.venv/lib/python3.12/site-packages/click/types.py +++ /dev/null @@ -1,1209 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import os -import stat -import sys -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import format_filename -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - -ParamTypeValue = t.TypeVar("ParamTypeValue") - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[str | None] = None - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None: - """Optionally might return extra information about a missing - parameter. - - .. 
versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). - - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> cabc.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.NoReturn: - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name: str = func.__name__ - self.func = func - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = sys.getfilesystemencoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType, t.Generic[ParamTypeValue]): - """The choice type allows a value to be checked against a fixed set - of supported values. - - You may pass any iterable value which will be converted to a tuple - and thus will only be iterated once. - - The resulting value will always be one of the originally passed choices. - See :meth:`normalize_choice` for more info on the mapping of strings - to choices. See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - - .. versionchanged:: 8.2.0 - Non-``str`` ``choices`` are now supported. It can additionally be any - iterable. Before you were not recommended to pass anything but a list or - tuple. - - .. versionadded:: 8.2.0 - Choice normalization can be overridden via :meth:`normalize_choice`. - """ - - name = "choice" - - def __init__( - self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True - ) -> None: - self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices) - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def _normalized_mapping( - self, ctx: Context | None = None - ) -> cabc.Mapping[ParamTypeValue, str]: - """ - Returns mapping where keys are the original choices and the values are - the normalized values that are accepted via the command line. - - This is a simple wrapper around :meth:`normalize_choice`, use that - instead which is supported. - """ - return { - choice: self.normalize_choice( - choice=choice, - ctx=ctx, - ) - for choice in self.choices - } - - def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str: - """ - Normalize a choice value, used to map a passed string to a choice. - Each choice must have a unique normalized value. 
- - By default uses :meth:`Context.token_normalize_func` and if not case - sensitive, convert it to a casefolded value. - - .. versionadded:: 8.2.0 - """ - normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice) - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(normed_value) - - if not self.case_sensitive: - normed_value = normed_value.casefold() - - return normed_value - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - if param.param_type_name == "option" and not param.show_choices: # type: ignore - choice_metavars = [ - convert_type(type(choice)).name.upper() for choice in self.choices - ] - choices_str = "|".join([*dict.fromkeys(choice_metavars)]) - else: - choices_str = "|".join( - [str(i) for i in self._normalized_mapping(ctx=ctx).values()] - ) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. - return f"[{choices_str}]" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str: - """ - Message shown when no choice is passed. - - .. versionchanged:: 8.2.0 Added ``ctx`` argument. - """ - return _("Choose from:\n\t{choices}").format( - choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values()) - ) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> ParamTypeValue: - """ - For a given value from the parser, normalize it and find its - matching normalized value in the list of choices. Then return the - matched "original" choice. - """ - normed_value = self.normalize_choice(choice=value, ctx=ctx) - normalized_mapping = self._normalized_mapping(ctx=ctx) - - try: - return next( - original - for original, normalized in normalized_mapping.items() - if normalized == normed_value - ) - except StopIteration: - self.fail( - self.get_invalid_choice_message(value=value, ctx=ctx), - param=param, - ctx=ctx, - ) - - def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str: - """Get the error message when the given choice is invalid. - - :param value: The invalid value. - - .. versionadded:: 8.2 - """ - choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values())) - return ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. 
- - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. - """ - - name = "datetime" - - def __init__(self, formats: cabc.Sequence[str] | None = None): - self.formats: cabc.Sequence[str] = formats or [ - "%Y-%m-%d", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%d %H:%M:%S", - ] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[type[t.Any]] - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, 
range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. - """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: t.Literal[1, -1], open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - if not open: - return bound - - # Could use math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - bool_states: dict[str, bool] = { - "1": True, - "0": False, - "yes": True, - "no": False, - "true": True, - "false": False, - "on": True, - "off": False, - "t": True, - "f": False, - "y": True, - "n": False, - # Absence of value is considered False. - "": False, - } - """A mapping of string values to boolean states. - - Mapping is inspired by :py:attr:`configparser.ConfigParser.BOOLEAN_STATES` - and extends it. - - .. caution:: - String values are lower-cased, as the ``str_to_bool`` comparison function - below is case-insensitive. - - .. warning:: - The mapping is not exhaustive, and does not cover all possible boolean strings - representations. It will remains as it is to avoid endless bikeshedding. - - Future work my be considered to make this mapping user-configurable from public - API. - """ - - @staticmethod - def str_to_bool(value: str | bool) -> bool | None: - """Convert a string to a boolean value. - - If the value is already a boolean, it is returned as-is. If the value is a - string, it is stripped of whitespaces and lower-cased, then checked against - the known boolean states pre-defined in the `BoolParamType.bool_states` mapping - above. - - Returns `None` if the value does not match any known boolean state. - """ - if isinstance(value, bool): - return value - return BoolParamType.bool_states.get(value.strip().lower()) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> bool: - normalized = self.str_to_bool(value) - if normalized is None: - self.fail( - _( - "{value!r} is not a valid boolean. Recognized values: {states}" - ).format(value=value, states=", ".join(sorted(self.bool_states))), - param, - ctx, - ) - return normalized - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Files can also be opened atomically in which case all writes go into a - separate file in the same folder and upon completion the file will - be moved over to the original location. This is useful if a file - regularly read by other users is modified. 
- - See :ref:`file-args` for more information. - - .. versionchanged:: 2.0 - Added the ``atomic`` parameter. - """ - - name = "filename" - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool | None = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool: - if self.lazy is not None: - return self.lazy - if os.fspath(value) == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, - value: str | os.PathLike[str] | t.IO[t.Any], - param: Parameter | None, - ctx: Context | None, - ) -> t.IO[t.Any]: - if _is_file_like(value): - return value - - value = t.cast("str | os.PathLike[str]", value) - - try: - lazy = self.resolve_lazy_flag(value) - - if lazy: - lf = LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - if ctx is not None: - ctx.call_on_close(lf.close_intelligently) - - return t.cast("t.IO[t.Any]", lf) - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: - self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]: - return hasattr(value, "read") or hasattr(value, "write") - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. 
- :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. - :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``path_type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: type[t.Any] | None = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name: str = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result( - self, value: str | os.PathLike[str] - ) -> str | bytes | os.PathLike[str]: - if self.type is not None and not isinstance(value, self.type): - if self.type is str: - return os.fsdecode(value) - elif self.type is bytes: - return os.fsencode(value) - else: - return t.cast("os.PathLike[str]", self.type(value)) - - return value - - def convert( - self, - value: str | os.PathLike[str], - param: Parameter | None, - ctx: Context | None, - ) -> str | bytes | os.PathLike[str]: - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - rv = os.path.realpath(rv) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} {filename!r} is a directory.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - return 
self.coerce_path_result(rv) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None: - self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple( - ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False) - ) - - -def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. 
- pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. -#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. -UUID = UUIDParameterType() - - -class OptionHelpExtra(t.TypedDict, total=False): - envvars: tuple[str, ...] - default: str - range: str - required: str diff --git a/.venv/lib/python3.12/site-packages/click/utils.py b/.venv/lib/python3.12/site-packages/click/utils.py deleted file mode 100644 index beae26f..0000000 --- a/.venv/lib/python3.12/site-packages/click/utils.py +++ /dev/null @@ -1,627 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType -from types import TracebackType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]: - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None: - try: - return func(*args, **kwargs) - except Exception: - pass - return None - - return update_wrapper(wrapper, func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(sys.getfilesystemencoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. 
- if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. - total_length += len("...") - - # remove words until the length is short enough - while i > 0: - total_length -= len(words[i]) + (i > 0) - - if total_length <= max_length: - break - - i -= 1 - - return " ".join(words[:i]) + "..." - - -class LazyFile: - """A lazy file works like a regular file but it does not fully open - the file but it does perform some basic checks early to see if the - filename parameter does make sense. This is useful for safely opening - files for writing. - """ - - def __init__( - self, - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - atomic: bool = False, - ): - self.name: str = os.fspath(filename) - self.mode = mode - self.encoding = encoding - self.errors = errors - self.atomic = atomic - self._f: t.IO[t.Any] | None - self.should_close: bool - - if self.name == "-": - self._f, self.should_close = open_stream(filename, mode, encoding, errors) - else: - if "r" in mode: - # Open and close the file in case we're opening it for - # reading so that we can catch at least some errors in - # some cases early. - open(filename, mode).close() - self._f = None - self.should_close = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self.open(), name) - - def __repr__(self) -> str: - if self._f is not None: - return repr(self._f) - return f"" - - def open(self) -> t.IO[t.Any]: - """Opens the file if it's not yet open. This call might fail with - a :exc:`FileError`. Not handling this error will produce an error - that Click shows. - """ - if self._f is not None: - return self._f - try: - rv, self.should_close = open_stream( - self.name, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - except OSError as e: - from .exceptions import FileError - - raise FileError(self.name, hint=e.strerror) from e - self._f = rv - return rv - - def close(self) -> None: - """Closes the underlying file, no matter what.""" - if self._f is not None: - self._f.close() - - def close_intelligently(self) -> None: - """This function only closes the file if it was opened by the lazy - file wrapper. For instance this will never close stdin. 
- """ - if self.should_close: - self.close() - - def __enter__(self) -> LazyFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close_intelligently() - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - self.open() - return iter(self._f) # type: ignore - - -class KeepOpenFile: - def __init__(self, file: t.IO[t.Any]) -> None: - self._file: t.IO[t.Any] = file - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._file, name) - - def __enter__(self) -> KeepOpenFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - pass - - def __repr__(self) -> str: - return repr(self._file) - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - return iter(self._file) - - -def echo( - message: t.Any | None = None, - file: t.IO[t.Any] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, -) -> None: - """Print a message and newline to stdout or a file. This should be - used instead of :func:`print` because it provides better support - for different data, files, and environments. - - Compared to :func:`print`, this does the following: - - - Ensures that the output encoding is not misconfigured on Linux. - - Supports Unicode in the Windows console. - - Supports writing to binary outputs, and supports writing bytes - to text outputs. - - Supports colors and styles on Windows. - - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. - :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - return - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: str | bytes | bytearray | None = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. 
- if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file, color) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: t.Literal["stdin", "stdout", "stderr"], - encoding: str | None = None, - errors: str | None = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. - """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO[t.Any]: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name or Path of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast( - "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic) - ) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast("t.IO[t.Any]", KeepOpenFile(f)) - - return f - - -def format_filename( - filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], - shorten: bool = False, -) -> str: - """Format a filename as a string for display. 
Ensures the filename can be - displayed by replacing any invalid bytes or surrogate escapes in the name - with the replacement character ``�``. - - Invalid bytes or surrogate escapes will raise an error when written to a - stream with ``errors="strict"``. This will typically happen with ``stdout`` - when the locale is something like ``en_GB.UTF-8``. - - Many scenarios *are* safe to write surrogates though, due to PEP 538 and - PEP 540, including: - - - Writing to ``stderr``, which uses ``errors="backslashreplace"``. - - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens - stdout and stderr with ``errors="surrogateescape"``. - - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. - - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. - Python opens stdout and stderr with ``errors="surrogateescape"``. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - else: - filename = os.fspath(filename) - - if isinstance(filename, bytes): - filename = filename.decode(sys.getfilesystemencoding(), "replace") - else: - filename = filename.encode("utf-8", "surrogateescape").decode( - "utf-8", "replace" - ) - - return filename - - -def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. - - To give you an idea, for an app called ``"Foo Bar"``, something like - the following folders could be returned: - - Mac OS X: - ``~/Library/Application Support/Foo Bar`` - Mac OS X (POSIX): - ``~/.foo-bar`` - Unix: - ``~/.config/foo-bar`` - Unix (POSIX): - ``~/.foo-bar`` - Windows (roaming): - ``C:\Users\\AppData\Roaming\Foo Bar`` - Windows (not roaming): - ``C:\Users\\AppData\Local\Foo Bar`` - - .. versionadded:: 2.0 - - :param app_name: the application name. This should be properly capitalized - and can contain whitespace. - :param roaming: controls if the folder should be roaming or not on Windows. - Has no effect otherwise. - :param force_posix: if this is set to `True` then on any POSIX system the - folder will be stored in the home folder with a leading - dot instead of the XDG config home or darwin's - application support folder. - """ - if WIN: - key = "APPDATA" if roaming else "LOCALAPPDATA" - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser("~") - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) - if sys.platform == "darwin": - return os.path.join( - os.path.expanduser("~/Library/Application Support"), app_name - ) - return os.path.join( - os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), - _posixify(app_name), - ) - - -class PacifyFlushWrapper: - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. 
- """ - - def __init__(self, wrapped: t.IO[t.Any]) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: str | None = None, _main: ModuleType | None = None -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if _main is None: - _main = sys.modules["__main__"] - - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - # It is set to "" inside a Shiv or PEX zipapp. - if getattr(_main, "__package__", None) in {None, ""} or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: cabc.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> list[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. 
versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/.venv/lib/python3.12/site-packages/distutils-precedence.pth b/.venv/lib/python3.12/site-packages/distutils-precedence.pth deleted file mode 100644 index 7f009fe..0000000 --- a/.venv/lib/python3.12/site-packages/distutils-precedence.pth +++ /dev/null @@ -1 +0,0 @@ -import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/METADATA b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/METADATA deleted file mode 100644 index 46028fb..0000000 --- a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/METADATA +++ /dev/null @@ -1,91 +0,0 @@ -Metadata-Version: 2.4 -Name: Flask -Version: 3.1.2 -Summary: A simple framework for building complex web applications. -Maintainer-email: Pallets -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: blinker>=1.9.0 -Requires-Dist: click>=8.1.3 -Requires-Dist: importlib-metadata>=3.6.0; python_version < '3.10' -Requires-Dist: itsdangerous>=2.2.0 -Requires-Dist: jinja2>=3.1.2 -Requires-Dist: markupsafe>=2.1.1 -Requires-Dist: werkzeug>=3.1.0 -Requires-Dist: asgiref>=3.2 ; extra == "async" -Requires-Dist: python-dotenv ; extra == "dotenv" -Project-URL: Changes, https://flask.palletsprojects.com/page/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/flask/ -Provides-Extra: async -Provides-Extra: dotenv - -
- -# Flask - -Flask is a lightweight [WSGI] web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around [Werkzeug] -and [Jinja], and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. - -[WSGI]: https://wsgi.readthedocs.io/ -[Werkzeug]: https://werkzeug.palletsprojects.com/ -[Jinja]: https://jinja.palletsprojects.com/ - -## A Simple Example - -```python -# save this as app.py -from flask import Flask - -app = Flask(__name__) - -@app.route("/") -def hello(): - return "Hello, World!" -``` - -``` -$ flask run - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) -``` - -## Donate - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/RECORD b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/RECORD deleted file mode 100644 index 383a360..0000000 --- a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -../../../bin/flask,sha256=1iixAh0kcQpCOpfDaMYv2t8xafvfjQMi95kZCrRTgAc,208 -flask-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask-3.1.2.dist-info/METADATA,sha256=oRg63DAAIcoLAr7kzTgIEKfm8_4HMTRpmWmIptdY_js,3167 -flask-3.1.2.dist-info/RECORD,, -flask-3.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask-3.1.2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -flask-3.1.2.dist-info/entry_points.txt,sha256=bBP7hTOS5fz9zLtC7sPofBZAlMkEvBxu7KqS6l5lvc4,40 -flask-3.1.2.dist-info/licenses/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -flask/__init__.py,sha256=mHvJN9Swtl1RDtjCqCIYyIniK_SZ_l_hqUynOzgpJ9o,2701 -flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 -flask/__pycache__/__init__.cpython-312.pyc,, -flask/__pycache__/__main__.cpython-312.pyc,, -flask/__pycache__/app.cpython-312.pyc,, -flask/__pycache__/blueprints.cpython-312.pyc,, -flask/__pycache__/cli.cpython-312.pyc,, -flask/__pycache__/config.cpython-312.pyc,, -flask/__pycache__/ctx.cpython-312.pyc,, -flask/__pycache__/debughelpers.cpython-312.pyc,, -flask/__pycache__/globals.cpython-312.pyc,, -flask/__pycache__/helpers.cpython-312.pyc,, -flask/__pycache__/logging.cpython-312.pyc,, -flask/__pycache__/sessions.cpython-312.pyc,, -flask/__pycache__/signals.cpython-312.pyc,, -flask/__pycache__/templating.cpython-312.pyc,, -flask/__pycache__/testing.cpython-312.pyc,, -flask/__pycache__/typing.cpython-312.pyc,, -flask/__pycache__/views.cpython-312.pyc,, -flask/__pycache__/wrappers.cpython-312.pyc,, -flask/app.py,sha256=XGqgFRsLgBhzIoB2HSftoMTIM3hjDiH6rdV7c3g3IKc,61744 
-flask/blueprints.py,sha256=p5QE2lY18GItbdr_RKRpZ8Do17g0PvQGIgZkSUDhX2k,4541 -flask/cli.py,sha256=Pfh72-BxlvoH0QHCDOc1HvXG7Kq5Xetf3zzNz2kNSHk,37184 -flask/config.py,sha256=PiqF0DPam6HW0FH4CH1hpXTBe30NSzjPEOwrz1b6kt0,13219 -flask/ctx.py,sha256=sPKzahqtgxaS7O0y9E_NzUJNUDyTD6M4GkDrVu2fU3Y,15064 -flask/debughelpers.py,sha256=PGIDhStW_efRjpaa3zHIpo-htStJOR41Ip3OJWPYBwo,6080 -flask/globals.py,sha256=XdQZmStBmPIs8t93tjx6pO7Bm3gobAaONWkFcUHaGas,1713 -flask/helpers.py,sha256=rJZge7_J288J1UQv5-kNf4oEaw332PP8NTW0QRIBbXE,23517 -flask/json/__init__.py,sha256=hLNR898paqoefdeAhraa5wyJy-bmRB2k2dV4EgVy2Z8,5602 -flask/json/__pycache__/__init__.cpython-312.pyc,, -flask/json/__pycache__/provider.cpython-312.pyc,, -flask/json/__pycache__/tag.cpython-312.pyc,, -flask/json/provider.py,sha256=5imEzY5HjV2HoUVrQbJLqXCzMNpZXfD0Y1XqdLV2XBA,7672 -flask/json/tag.py,sha256=DhaNwuIOhdt2R74oOC9Y4Z8ZprxFYiRb5dUP5byyINw,9281 -flask/logging.py,sha256=8sM3WMTubi1cBb2c_lPkWpN0J8dMAqrgKRYLLi1dCVI,2377 -flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask/sansio/README.md,sha256=-0X1tECnilmz1cogx-YhNw5d7guK7GKrq_DEV2OzlU0,228 -flask/sansio/__pycache__/app.cpython-312.pyc,, -flask/sansio/__pycache__/blueprints.cpython-312.pyc,, -flask/sansio/__pycache__/scaffold.cpython-312.pyc,, -flask/sansio/app.py,sha256=5EbxwHOchgcpZqQyalA9vyDBopknOvDg6BVwXFyFD2s,38099 -flask/sansio/blueprints.py,sha256=Tqe-7EkZ-tbWchm8iDoCfD848f0_3nLv6NNjeIPvHwM,24637 -flask/sansio/scaffold.py,sha256=wSASXYdFRWJmqcL0Xq-T7N-PDVUSiFGvjO9kPZg58bk,30371 -flask/sessions.py,sha256=duvYGmCGh_H3cgMuy2oeSjrCsCvLylF4CBKOXpN0Qms,15480 -flask/signals.py,sha256=V7lMUww7CqgJ2ThUBn1PiatZtQanOyt7OZpu2GZI-34,750 -flask/templating.py,sha256=IHsdsF-eBJPCJE0AJLCi1VhhnytOGdzHCn3yThz87c4,7536 -flask/testing.py,sha256=zzC7XxhBWOP9H697IV_4SG7Lg3Lzb5PWiyEP93_KQXE,10117 -flask/typing.py,sha256=L-L5t2jKgS0aOmVhioQ_ylqcgiVFnA6yxO-RLNhq-GU,3293 -flask/views.py,sha256=xzJx6oJqGElThtEghZN7ZQGMw5TDFyuRxUkecwRuAoA,6962 -flask/wrappers.py,sha256=jUkv4mVek2Iq4hwxd4RvqrIMb69Bv0PElDgWLmd5ORo,9406 diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/WHEEL deleted file mode 100644 index d8b9936..0000000 --- a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/entry_points.txt deleted file mode 100644 index eec6733..0000000 --- a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -flask=flask.cli:main - diff --git a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 9d227a0..0000000 --- a/.venv/lib/python3.12/site-packages/flask-3.1.2.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. 
Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/flask/__init__.py b/.venv/lib/python3.12/site-packages/flask/__init__.py deleted file mode 100644 index 1fdc50c..0000000 --- a/.venv/lib/python3.12/site-packages/flask/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import json as json -from .app import Flask as Flask -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import after_this_request as after_this_request -from .ctx import copy_current_request_context as copy_current_request_context -from .ctx import has_app_context as has_app_context -from .ctx import has_request_context as has_request_context -from .globals import current_app as current_app -from .globals import g as g -from .globals import request as request -from .globals import session as session -from .helpers import abort as abort -from .helpers import flash as flash -from .helpers import get_flashed_messages as get_flashed_messages -from .helpers import get_template_attribute as get_template_attribute -from .helpers import make_response as make_response -from .helpers import redirect as redirect -from .helpers import send_file as send_file -from .helpers import send_from_directory as send_from_directory -from .helpers import stream_with_context as stream_with_context -from .helpers import url_for as url_for -from .json import jsonify as jsonify -from .signals import appcontext_popped as appcontext_popped -from .signals import appcontext_pushed as appcontext_pushed -from .signals import appcontext_tearing_down as appcontext_tearing_down -from .signals import before_render_template as before_render_template -from .signals import got_request_exception as got_request_exception -from .signals import message_flashed as message_flashed -from .signals import request_finished as request_finished -from .signals import request_started as request_started -from .signals import request_tearing_down as request_tearing_down -from .signals import template_rendered as template_rendered -from .templating import render_template as render_template -from .templating import render_template_string as render_template_string -from .templating import 
stream_template as stream_template -from .templating import stream_template_string as stream_template_string -from .wrappers import Request as Request -from .wrappers import Response as Response - -if not t.TYPE_CHECKING: - - def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Flask 3.2. Use feature detection or" - " 'importlib.metadata.version(\"flask\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("flask") - - raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/flask/__main__.py b/.venv/lib/python3.12/site-packages/flask/__main__.py deleted file mode 100644 index 4e28416..0000000 --- a/.venv/lib/python3.12/site-packages/flask/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - -main() diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c882fc4..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index 5f3e4fd..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc deleted file mode 100644 index 6aaa9c6..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/app.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc deleted file mode 100644 index 66801e5..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/blueprints.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc deleted file mode 100644 index 4f76c8d..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/cli.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc deleted file mode 100644 index c3c45fb..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/config.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc deleted file mode 100644 index 144284e..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/ctx.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc deleted file mode 100644 index 2a23b75..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/debughelpers.cpython-312.pyc and /dev/null 
differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc deleted file mode 100644 index fb16968..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/globals.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc deleted file mode 100644 index 7011b26..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/helpers.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc deleted file mode 100644 index fb94bab..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/logging.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc deleted file mode 100644 index a8bad09..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/sessions.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc deleted file mode 100644 index 9699d17..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/signals.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc deleted file mode 100644 index 1f81071..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/templating.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc deleted file mode 100644 index acca5c4..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/testing.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc deleted file mode 100644 index 811688d..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/typing.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc deleted file mode 100644 index 652407b..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/views.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc deleted file mode 100644 index ffea4db..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/__pycache__/wrappers.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/app.py b/.venv/lib/python3.12/site-packages/flask/app.py deleted file mode 100644 index 1232b03..0000000 --- a/.venv/lib/python3.12/site-packages/flask/app.py +++ /dev/null @@ -1,1536 
+0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import sys -import typing as t -import weakref -from datetime import timedelta -from inspect import iscoroutinefunction -from itertools import chain -from types import TracebackType -from urllib.parse import quote as _url_quote - -import click -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.routing import BuildError -from werkzeug.routing import MapAdapter -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.serving import is_running_from_reloader -from werkzeug.wrappers import Response as BaseResponse -from werkzeug.wsgi import get_host - -from . import cli -from . import typing as ft -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import g -from .globals import request -from .globals import request_ctx -from .globals import session -from .helpers import get_debug_flag -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .helpers import send_from_directory -from .sansio.app import App -from .sansio.scaffold import _sentinel -from .sessions import SecureCookieSessionInterface -from .sessions import SessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import Environment -from .wrappers import Request -from .wrappers import Response - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIEnvironment - - from .testing import FlaskClient - from .testing import FlaskCliRunner - from .typing import HeadersValue - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class Flask(App): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. 
- - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. 
- This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - default_config = ImmutableDict( - { - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "SECRET_KEY": None, - "SECRET_KEY_FALLBACKS": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "TRUSTED_HOSTS": None, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_PARTITIONED": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "MAX_FORM_MEMORY_SIZE": 500_000, - "MAX_FORM_PARTS": 1_000, - "SEND_FILE_MAX_AGE_DEFAULT": None, - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - "PROVIDE_AUTOMATIC_OPTIONS": True, - } - ) - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class: type[Request] = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class: type[Response] = Response - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface: SessionInterface = SecureCookieSessionInterface() - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ): - super().__init__( - import_name=import_name, - static_url_path=static_url_path, - static_folder=static_folder, - static_host=static_host, - host_matching=host_matching, - subdomain_matching=subdomain_matching, - template_folder=template_folder, - instance_path=instance_path, - instance_relative_config=instance_relative_config, - root_path=root_path, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = cli.AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert bool(static_host) == host_matching, ( - "Invalid static_host/host_matching combination" - ) - # Use a weakref to avoid creating a reference cycle between the app - # and the view function (see #3761). 
- self_ref = weakref.ref(self) - self.add_url_rule( - f"{self.static_url_path}/", - endpoint="static", - host=static_host, - view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 - ) - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource( - self, resource: str, mode: str = "rb", encoding: str | None = None - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for reading. - - For example, if the file ``schema.sql`` is next to the file - ``app.py`` where the ``Flask`` app is defined, it can be opened - with: - - .. code-block:: python - - with app.open_resource("schema.sql") as f: - conn.executescript(f.read()) - - :param resource: Path to the resource relative to :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is supported, - valid values are ``"r"`` (or ``"rt"``) and ``"rb"``. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - path = os.path.join(self.root_path, resource) - - if mode == "rb": - return open(path, mode) # pyright: ignore - - return open(path, mode, encoding=encoding) - - def open_instance_resource( - self, resource: str, mode: str = "rb", encoding: str | None = "utf-8" - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to the application's instance folder - :attr:`instance_path`. Unlike :meth:`open_resource`, files in the - instance folder can be opened for writing. - - :param resource: Path to the resource relative to :attr:`instance_path`. - :param mode: Open the file in this mode. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. 
- """ - path = os.path.join(self.instance_path, resource) - - if "b" in mode: - return open(path, mode) - - return open(path, mode, encoding=encoding) - - def create_jinja_environment(self) -> Environment: - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] - - if auto_reload is None: - auto_reload = self.debug - - options["auto_reload"] = auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=self.url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.policies["json.dumps_function"] = self.json.dumps - return rv - - def create_url_adapter(self, request: Request | None) -> MapAdapter | None: - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionchanged:: 3.1 - If :data:`SERVER_NAME` is set, it does not restrict requests to - only that domain, for both ``subdomain_matching`` and - ``host_matching``. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - - .. versionchanged:: 0.9 - This can be called outside a request when the URL adapter is created - for an application context. - - .. versionadded:: 0.6 - """ - if request is not None: - if (trusted_hosts := self.config["TRUSTED_HOSTS"]) is not None: - request.trusted_hosts = trusted_hosts - - # Check trusted_hosts here until bind_to_environ does. - request.host = get_host(request.environ, request.trusted_hosts) # pyright: ignore - subdomain = None - server_name = self.config["SERVER_NAME"] - - if self.url_map.host_matching: - # Don't pass SERVER_NAME, otherwise it's used and the actual - # host is ignored, which breaks host matching. - server_name = None - elif not self.subdomain_matching: - # Werkzeug doesn't implement subdomain matching yet. Until then, - # disable it by forcing the current subdomain to the default, or - # the empty string. - subdomain = self.url_map.default_subdomain or "" - - return self.url_map.bind_to_environ( - request.environ, server_name=server_name, subdomain=subdomain - ) - - # Need at least SERVER_NAME to match/build outside a request. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - return None - - def raise_routing_exception(self, request: Request) -> t.NoReturn: - """Intercept routing exceptions and possibly do something else. - - In debug mode, intercept a routing redirect and replace it with - an error if the body will be discarded. 
- - With modern Werkzeug this shouldn't occur, since it now uses a - 308 status which tells the browser to resend the method and - body. - - .. versionchanged:: 2.1 - Don't intercept 307 and 308 redirects. - - :meta private: - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.routing_exception.code in {307, 308} - or request.method in {"GET", "HEAD", "OPTIONS"} - ): - raise request.routing_exception # type: ignore[misc] - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def update_template_context(self, context: dict[str, t.Any]) -> None: - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - names: t.Iterable[str | None] = (None,) - - # A template may be rendered outside a request context. - if request: - names = chain(names, reversed(request.blueprints)) - - # The values passed to render_template take precedence. Keep a - # copy to re-apply after all context functions. - orig_ctx = context.copy() - - for name in names: - if name in self.template_context_processors: - for func in self.template_context_processors[name]: - context.update(self.ensure_sync(func)()) - - context.update(orig_ctx) - - def make_shell_context(self) -> dict[str, t.Any]: - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - def run( - self, - host: str | None = None, - port: int | None = None, - debug: bool | None = None, - load_dotenv: bool = True, - **options: t.Any, - ) -> None: - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :doc:`/deploying/index` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. 
Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Ignore this call so that it doesn't start another server if - # the 'flask run' command is used. - if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - if not is_running_from_reloader(): - click.secho( - " * Ignoring a call to 'app.run()' that would block" - " the current 'flask' CLI command.\n" - " Only call 'app.run()' in an 'if __name__ ==" - ' "__main__"\' guard.', - fg="red", - ) - - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, env var overrides existing value - if "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - server_name = self.config.get("SERVER_NAME") - sn_host = sn_port = None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - if not host: - if sn_host: - host = sn_host - else: - host = "127.0.0.1" - - if port or port == 0: - port = int(port) - elif sn_port: - port = int(sn_port) - else: - port = 5000 - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.debug, self.name) - - from werkzeug.serving import run_simple - - try: - run_simple(t.cast(str, host), port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> FlaskClient: - """Creates a test client for this application. For information - about unit testing head over to :doc:`/testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. 
This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. - For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. - """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls - return cls( # type: ignore - self, self.response_class, use_cookies=use_cookies, **kwargs - ) - - def test_cli_runner(self, **kwargs: t.Any) -> FlaskCliRunner: - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls - - return cls(self, **kwargs) # type: ignore - - def handle_http_exception( - self, e: HTTPException - ) -> HTTPException | ft.ResponseReturnValue: - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPException`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e, request.blueprints) - if handler is None: - return e - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_user_exception( - self, e: Exception - ) -> HTTPException | ft.ResponseReturnValue: - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. 
versionadded:: 0.7 - """ - if isinstance(e, BadRequestKeyError) and ( - self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] - ): - e.show_exception = True - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e, request.blueprints) - - if handler is None: - raise - - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_exception(self, e: Exception) -> Response: - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :data:`PROPAGATE_EXCEPTIONS` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_info = sys.exc_info() - got_request_exception.send(self, _async_wrapper=self.ensure_sync, exception=e) - propagate = self.config["PROPAGATE_EXCEPTIONS"] - - if propagate is None: - propagate = self.testing or self.debug - - if propagate: - # Re-raise if called with an active exception, otherwise - # raise the passed in exception. - if exc_info[1] is e: - raise - - raise e - - self.log_exception(exc_info) - server_error: InternalServerError | ft.ResponseReturnValue - server_error = InternalServerError(original_exception=e) - handler = self._find_error_handler(server_error, request.blueprints) - - if handler is not None: - server_error = self.ensure_sync(handler)(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception( - self, - exc_info: (tuple[type, BaseException, TracebackType] | tuple[None, None, None]), - ) -> None: - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - f"Exception on {request.path} [{request.method}]", exc_info=exc_info - ) - - def dispatch_request(self) -> ft.ResponseReturnValue: - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. 
- """ - req = request_ctx.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule: Rule = req.url_rule # type: ignore[assignment] - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - view_args: dict[str, t.Any] = req.view_args # type: ignore[assignment] - return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) # type: ignore[no-any-return] - - def full_dispatch_request(self) -> Response: - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self._got_first_request = True - - try: - request_started.send(self, _async_wrapper=self.ensure_sync) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request( - self, - rv: ft.ResponseReturnValue | HTTPException, - from_error_handler: bool = False, - ) -> Response: - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send( - self, _async_wrapper=self.ensure_sync, response=response - ) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def make_default_options_response(self) -> Response: - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. versionadded:: 0.7 - """ - adapter = request_ctx.url_adapter - methods = adapter.allowed_methods() # type: ignore[union-attr] - rv = self.response_class() - rv.allow.update(methods) - return rv - - def ensure_sync(self, func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Ensure that the function is synchronous for WSGI workers. - Plain ``def`` functions are returned as-is. ``async def`` - functions are wrapped to run and wait for the response. - - Override this method to change how the app runs async views. - - .. versionadded:: 2.0 - """ - if iscoroutinefunction(func): - return self.async_to_sync(func) - - return func - - def async_to_sync( - self, func: t.Callable[..., t.Coroutine[t.Any, t.Any, t.Any]] - ) -> t.Callable[..., t.Any]: - """Return a sync function that will run the coroutine function. - - .. code-block:: python - - result = app.async_to_sync(func)(*args, **kwargs) - - Override this method to change how the app converts async code - to be synchronously callable. - - .. 
versionadded:: 2.0 - """ - try: - from asgiref.sync import async_to_sync as asgiref_async_to_sync - except ImportError: - raise RuntimeError( - "Install Flask with the 'async' extra in order to use async views." - ) from None - - return asgiref_async_to_sync(func) - - def url_for( - self, - /, - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, - ) -> str: - """Generate a URL to the given endpoint with the given values. - - This is called by :func:`flask.url_for`, and can be called - directly as well. - - An *endpoint* is the name of a URL rule, usually added with - :meth:`@app.route() `, and usually the same name as the - view function. A route defined in a :class:`~flask.Blueprint` - will prepend the blueprint's name separated by a ``.`` to the - endpoint. - - In some cases, such as email messages, you want URLs to include - the scheme and domain, like ``https://example.com/hello``. When - not in an active request, URLs will be external by default, but - this requires setting :data:`SERVER_NAME` so Flask knows what - domain to use. :data:`APPLICATION_ROOT` and - :data:`PREFERRED_URL_SCHEME` should also be configured as - needed. This config is only used when not in an active request. - - Functions can be decorated with :meth:`url_defaults` to modify - keyword arguments before the URL is built. - - If building fails for some reason, such as an unknown endpoint - or incorrect values, the app's :meth:`handle_url_build_error` - method is called. If that returns a string, that is returned, - otherwise a :exc:`~werkzeug.routing.BuildError` is raised. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it - is external. - :param _external: If given, prefer the URL to be internal - (False) or require it to be external (True). External URLs - include the scheme and domain. When not in an active - request, URLs are external by default. - :param values: Values to use for the variable parts of the URL - rule. Unknown keys are appended as query string arguments, - like ``?a=b&c=d``. - - .. versionadded:: 2.2 - Moved from ``flask.url_for``, which calls this method. - """ - req_ctx = _cv_request.get(None) - - if req_ctx is not None: - url_adapter = req_ctx.url_adapter - blueprint_name = req_ctx.request.blueprint - - # If the endpoint starts with "." and the request matches a - # blueprint, the endpoint is relative to the blueprint. - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = f"{blueprint_name}{endpoint}" - else: - endpoint = endpoint[1:] - - # When in a request, generate a URL without scheme and - # domain by default, unless a scheme is given. - if _external is None: - _external = _scheme is not None - else: - app_ctx = _cv_app.get(None) - - # If called by helpers.url_for, an app context is active, - # use its url_adapter. Otherwise, app.url_for was called - # directly, build an adapter. - if app_ctx is not None: - url_adapter = app_ctx.url_adapter - else: - url_adapter = self.create_url_adapter(None) - - if url_adapter is None: - raise RuntimeError( - "Unable to build URLs outside an active request" - " without 'SERVER_NAME' configured. 
Also configure" - " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" - " needed." - ) - - # When outside a request, generate a URL with scheme and - # domain by default. - if _external is None: - _external = True - - # It is an error to set _scheme when _external=False, in order - # to avoid accidental insecure URLs. - if _scheme is not None and not _external: - raise ValueError("When specifying '_scheme', '_external' must be True.") - - self.inject_url_defaults(endpoint, values) - - try: - rv = url_adapter.build( # type: ignore[union-attr] - endpoint, - values, - method=_method, - url_scheme=_scheme, - force_external=_external, - ) - except BuildError as error: - values.update( - _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external - ) - return self.handle_url_build_error(error, endpoint, values) - - if _anchor is not None: - _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@") - rv = f"{rv}#{_anchor}" - - return rv - - def make_response(self, rv: ft.ResponseReturnValue) -> Response: - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``list`` - A list that will be jsonify'd before being returned. - - ``generator`` or ``iterator`` - A generator that returns ``str`` or ``bytes`` to be - streamed as the response. - - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 2.2 - A generator will be converted to a streaming response. - A list will be converted to a JSON response. - - .. versionchanged:: 1.1 - A dict will be converted to a JSON response. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status: int | None = None - headers: HeadersValue | None = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv # type: ignore[misc] - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv # pyright: ignore - else: - rv, status = rv # type: ignore[assignment,misc] - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." 
- ) - - # the body must not be None - if rv is None: - raise TypeError( - f"The view function for {request.endpoint!r} did not" - " return a valid response. The function either returned" - " None or ended without a return statement." - ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, cabc.Iterator): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class( - rv, # pyright: ignore - status=status, - headers=headers, # type: ignore[arg-type] - ) - status = headers = None - elif isinstance(rv, (dict, list)): - rv = self.json.response(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type( - rv, # type: ignore[arg-type] - request.environ, - ) - except TypeError as e: - raise TypeError( - f"{e}\nThe view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it" - f" was a {type(rv).__name__}." - ).with_traceback(sys.exc_info()[2]) from None - else: - raise TypeError( - "The view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it was a" - f" {type(rv).__name__}." - ) - - rv = t.cast(Response, rv) - # prefer the status if it was provided - if status is not None: - if isinstance(status, (str, bytes, bytearray)): - rv.status = status - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.update(headers) - - return rv - - def preprocess_request(self) -> ft.ResponseReturnValue | None: - """Called before the request is dispatched. Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. - """ - names = (None, *reversed(request.blueprints)) - - for name in names: - if name in self.url_value_preprocessors: - for url_func in self.url_value_preprocessors[name]: - url_func(request.endpoint, request.view_args) - - for name in names: - if name in self.before_request_funcs: - for before_func in self.before_request_funcs[name]: - rv = self.ensure_sync(before_func)() - - if rv is not None: - return rv # type: ignore[no-any-return] - - return None - - def process_response(self, response: Response) -> Response: - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. 
- """ - ctx = request_ctx._get_current_object() # type: ignore[attr-defined] - - for func in ctx._after_request_functions: - response = self.ensure_sync(func)(response) - - for name in chain(request.blueprints, (None,)): - if name in self.after_request_funcs: - for func in reversed(self.after_request_funcs[name]): - response = self.ensure_sync(func)(response) - - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - - return response - - def do_teardown_request( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for name in chain(request.blueprints, (None,)): - if name in self.teardown_request_funcs: - for func in reversed(self.teardown_request_funcs[name]): - self.ensure_sync(func)(exc) - - request_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def do_teardown_appcontext( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for func in reversed(self.teardown_appcontext_funcs): - self.ensure_sync(func)(exc) - - appcontext_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def app_context(self) -> AppContext: - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. - - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ: WSGIEnvironment) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. 
- - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with app.test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The actual WSGI application. This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. - See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. 
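A short sketch combining the two patterns described above: wrapping ``wsgi_app`` with a toy middleware so the app object itself stays intact, and using ``test_request_context`` to run request-bound code without dispatching a full request (the middleware and URL are assumptions for the example)::

    from flask import Flask, request

    app = Flask(__name__)  # hypothetical example app

    class HeaderMiddleware:
        # Toy WSGI middleware that injects a default request header.
        def __init__(self, wsgi_app):
            self.wsgi_app = wsgi_app

        def __call__(self, environ, start_response):
            environ.setdefault("HTTP_X_EXAMPLE", "1")
            return self.wsgi_app(environ, start_response)

    # Wrap wsgi_app, not the app object, so app methods remain available.
    app.wsgi_app = HeaderMiddleware(app.wsgi_app)

    # Exercise request-bound code without dispatching a full request.
    with app.test_request_context("/report?user=42"):
        assert request.args["user"] == "42"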
- """ - ctx = self.request_context(environ) - error: BaseException | None = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if "werkzeug.debug.preserve_context" in environ: - environ["werkzeug.debug.preserve_context"](_cv_app.get()) - environ["werkzeug.debug.preserve_context"](_cv_request.get()) - - if error is not None and self.should_ignore_error(error): - error = None - - ctx.pop(error) - - def __call__( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app`, which can be - wrapped to apply middleware. - """ - return self.wsgi_app(environ, start_response) diff --git a/.venv/lib/python3.12/site-packages/flask/blueprints.py b/.venv/lib/python3.12/site-packages/flask/blueprints.py deleted file mode 100644 index b6d4e43..0000000 --- a/.venv/lib/python3.12/site-packages/flask/blueprints.py +++ /dev/null @@ -1,128 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from datetime import timedelta - -from .cli import AppGroup -from .globals import current_app -from .helpers import send_from_directory -from .sansio.blueprints import Blueprint as SansioBlueprint -from .sansio.blueprints import BlueprintSetupState as BlueprintSetupState # noqa -from .sansio.scaffold import _sentinel - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -class Blueprint(SansioBlueprint): - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore - ) -> None: - super().__init__( - name, - import_name, - static_folder, - static_url_path, - template_folder, - url_prefix, - subdomain, - url_defaults, - root_path, - cli_group, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. 
versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource( - self, resource: str, mode: str = "rb", encoding: str | None = "utf-8" - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for reading. The - blueprint-relative equivalent of the app's :meth:`~.Flask.open_resource` - method. - - :param resource: Path to the resource relative to :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is supported, - valid values are ``"r"`` (or ``"rt"``) and ``"rb"``. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - path = os.path.join(self.root_path, resource) - - if mode == "rb": - return open(path, mode) # pyright: ignore - - return open(path, mode, encoding=encoding) diff --git a/.venv/lib/python3.12/site-packages/flask/cli.py b/.venv/lib/python3.12/site-packages/flask/cli.py deleted file mode 100644 index ed11f25..0000000 --- a/.venv/lib/python3.12/site-packages/flask/cli.py +++ /dev/null @@ -1,1135 +0,0 @@ -from __future__ import annotations - -import ast -import collections.abc as cabc -import importlib.metadata -import inspect -import os -import platform -import re -import sys -import traceback -import typing as t -from functools import update_wrapper -from operator import itemgetter -from types import ModuleType - -import click -from click.core import ParameterSource -from werkzeug import run_simple -from werkzeug.serving import is_running_from_reloader -from werkzeug.utils import import_string - -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_load_dotenv - -if t.TYPE_CHECKING: - import ssl - - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(module: ModuleType) -> Flask: - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . import Flask - - # Search for the most common names first. - for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. 
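The blueprint helpers above (``send_static_file`` and ``open_resource``) mirror their ``Flask`` counterparts. A minimal usage sketch, with folder and file names that are assumptions for the example::

    from flask import Blueprint

    admin = Blueprint(
        "admin",
        __name__,
        static_folder="static",          # served at static_url_path
        static_url_path="/admin/static",
        template_folder="templates",
    )

    @admin.route("/admin/motd")
    def motd():
        # open_resource() reads relative to the blueprint's root_path;
        # only the read modes "r", "rt" and "rb" are accepted.
        with admin.open_resource("motd.txt", mode="r") as f:
            return f.read()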
- matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - "Detected multiple Flask applications in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify the correct one." - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = app_factory() - - if isinstance(app, Flask): - return app - except TypeError as e: - if not _called_with_wrong_args(app_factory): - raise - - raise NoAppException( - f"Detected factory '{attr_name}' in module '{module.__name__}'," - " but could not call it without arguments. Use" - f" '{module.__name__}:{attr_name}(args)'" - " to specify arguments." - ) from e - - raise NoAppException( - "Failed to find Flask application or factory in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify one." - ) - - -def _called_with_wrong_args(f: t.Callable[..., Flask]) -> bool: - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(module: ModuleType, app_name: str) -> Flask: - """Check if the given string is a variable name or a function. Call - a function to get the app instance, or return the variable directly. - """ - from . import Flask - - # Parse app_name as a single expression to determine if it's a valid - # attribute name or function call. - try: - expr = ast.parse(app_name.strip(), mode="eval").body - except SyntaxError: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) from None - - if isinstance(expr, ast.Name): - name = expr.id - args = [] - kwargs = {} - elif isinstance(expr, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expr.func, ast.Name): - raise NoAppException( - f"Function reference must be a simple name: {app_name!r}." - ) - - name = expr.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expr.args] - kwargs = { - kw.arg: ast.literal_eval(kw.value) - for kw in expr.keywords - if kw.arg is not None - } - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise NoAppException( - f"Failed to parse arguments as literal values: {app_name!r}." - ) from None - else: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException( - f"Failed to find attribute {name!r} in {module.__name__!r}." - ) from e - - # If the attribute is a function, call it with any args and kwargs - # to get the real application. 
- if inspect.isfunction(attr): - try: - app = attr(*args, **kwargs) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - f"The factory {app_name!r} in module" - f" {module.__name__!r} could not be called with the" - " specified arguments." - ) from e - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from" - f" '{module.__name__}:{app_name}'." - ) - - -def prepare_import(path: str) -> str: - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[True] = True -) -> Flask: ... - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[False] = ... -) -> Flask | None: ... - - -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: bool = True -) -> Flask | None: - try: - __import__(module_name) - except ImportError: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. - if sys.exc_info()[2].tb_next: # type: ignore[union-attr] - raise NoAppException( - f"While importing {module_name!r}, an ImportError was" - f" raised:\n\n{traceback.format_exc()}" - ) from None - elif raise_if_not_found: - raise NoAppException(f"Could not import {module_name!r}.") from None - else: - return None - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(module) - else: - return find_app_by_string(module, app_name) - - -def get_version(ctx: click.Context, param: click.Parameter, value: t.Any) -> None: - if not value or ctx.resilient_parsing: - return - - flask_version = importlib.metadata.version("flask") - werkzeug_version = importlib.metadata.version("werkzeug") - - click.echo( - f"Python {platform.python_version()}\n" - f"Flask {flask_version}\n" - f"Werkzeug {werkzeug_version}", - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the Flask version.", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class ScriptInfo: - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - - .. versionchanged:: 3.1 - Added the ``load_dotenv_defaults`` parameter and attribute. 
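The discovery helpers above look for a module-level ``app``/``application`` object first, then for a ``create_app``/``make_app`` factory, and ``find_app_by_string`` additionally parses literal call arguments. A sketch of an ``app.py`` layout those helpers can load (the config handling is illustrative)::

    # app.py
    from flask import Flask

    def create_app(config_name="production"):
        # Found as a factory; can also be selected explicitly with
        # --app "app:create_app('development')".
        app = Flask(__name__)
        app.config["ENV_NAME"] = config_name
        return app

    # A module-level instance named "app" or "application" is found first.
    app = create_app()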
- """ - - def __init__( - self, - app_import_path: str | None = None, - create_app: t.Callable[..., Flask] | None = None, - set_debug_flag: bool = True, - load_dotenv_defaults: bool = True, - ) -> None: - #: Optionally the import path for the Flask application. - self.app_import_path = app_import_path - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. - self.data: dict[t.Any, t.Any] = {} - self.set_debug_flag = set_debug_flag - - self.load_dotenv_defaults = get_load_dotenv(load_dotenv_defaults) - """Whether default ``.flaskenv`` and ``.env`` files should be loaded. - - ``ScriptInfo`` doesn't load anything, this is for reference when doing - the load elsewhere during processing. - - .. versionadded:: 3.1 - """ - - self._loaded_app: Flask | None = None - - def load_app(self) -> Flask: - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. - """ - if self._loaded_app is not None: - return self._loaded_app - app: Flask | None = None - if self.create_app is not None: - app = self.create_app() - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, maxsplit=1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(import_name, None, raise_if_not_found=False) - - if app is not None: - break - - if app is None: - raise NoAppException( - "Could not locate a Flask application. Use the" - " 'flask --app' option, 'FLASK_APP' environment" - " variable, or a 'wsgi.py' or 'app.py' file in the" - " current directory." - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def with_appcontext(f: F) -> F: - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. - - Custom commands (and their options) registered under ``app.cli`` or - ``blueprint.cli`` will always have an app context available, this - decorator is not required in that case. - - .. versionchanged:: 2.2 - The app context is active for subcommands as well as the - decorated callback. The app context is always available to - ``app.cli`` command and parameter callbacks. - """ - - @click.pass_context - def decorator(ctx: click.Context, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - if not current_app: - app = ctx.ensure_object(ScriptInfo).load_app() - ctx.with_resource(app.app_context()) - - return ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) # type: ignore[return-value] - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. 
- """ - - def command( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Command]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. - """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f: t.Callable[..., t.Any]) -> click.Command: - if wrap_for_ctx: - f = with_appcontext(f) - return super(AppGroup, self).command(*args, **kwargs)(f) # type: ignore[no-any-return] - - return decorator - - def group( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Group]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return super().group(*args, **kwargs) # type: ignore[no-any-return] - - -def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: - if value is None: - return None - - info = ctx.ensure_object(ScriptInfo) - info.app_import_path = value - return value - - -# This option is eager so the app will be available if --help is given. -# --help is also eager, so --app must be before it in the param list. -# no_args_is_help bypasses eager processing, so this option must be -# processed manually in that case to ensure FLASK_APP gets picked up. -_app_option = click.Option( - ["-A", "--app"], - metavar="IMPORT", - help=( - "The Flask application or factory function to load, in the form 'module:name'." - " Module can be a dotted import or file path. Name is not required if it is" - " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" - " pass arguments." - ), - is_eager=True, - expose_value=False, - callback=_set_app, -) - - -def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: - # If the flag isn't provided, it will default to False. Don't use - # that, let debug be set by env in that case. - source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] - - if source is not None and source in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ): - return None - - # Set with env var instead of ScriptInfo.load so that it can be - # accessed early during a factory function. - os.environ["FLASK_DEBUG"] = "1" if value else "0" - return value - - -_debug_option = click.Option( - ["--debug/--no-debug"], - help="Set debug mode.", - expose_value=False, - callback=_set_debug, -) - - -def _env_file_callback( - ctx: click.Context, param: click.Option, value: str | None -) -> str | None: - try: - import dotenv # noqa: F401 - except ImportError: - # Only show an error if a value was passed, otherwise we still want to - # call load_dotenv and show a message without exiting. - if value is not None: - raise click.BadParameter( - "python-dotenv must be installed to load an env file.", - ctx=ctx, - param=param, - ) from None - - # Load if a value was passed, or we want to load default files, or both. - if value is not None or ctx.obj.load_dotenv_defaults: - load_dotenv(value, load_defaults=ctx.obj.load_dotenv_defaults) - - return value - - -# This option is eager so env vars are loaded as early as possible to be -# used by other options. 
-_env_file_option = click.Option( - ["-e", "--env-file"], - type=click.Path(exists=True, dir_okay=False), - help=( - "Load environment variables from this file, taking precedence over" - " those set by '.env' and '.flaskenv'. Variables set directly in the" - " environment take highest precedence. python-dotenv must be installed." - ), - is_eager=True, - expose_value=False, - callback=_env_file_callback, -) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. see :ref:`custom-scripts`. - - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag. - - .. versionchanged:: 3.1 - ``-e path`` takes precedence over default ``.env`` and ``.flaskenv`` files. - - .. versionchanged:: 2.2 - Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. - - .. versionchanged:: 2.2 - An app context is pushed when running ``app.cli`` commands, so - ``@with_appcontext`` is no longer required for those commands. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands: bool = True, - create_app: t.Callable[..., Flask] | None = None, - add_version_option: bool = True, - load_dotenv: bool = True, - set_debug_flag: bool = True, - **extra: t.Any, - ) -> None: - params: list[click.Parameter] = list(extra.pop("params", None) or ()) - # Processing is done with option callbacks instead of a group - # callback. This allows users to make a custom group callback - # without losing the behavior. --env-file must come first so - # that it is eagerly evaluated before --app. - params.extend((_env_file_option, _app_option, _debug_option)) - - if add_version_option: - params.append(version_option) - - if "context_settings" not in extra: - extra["context_settings"] = {} - - extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") - - super().__init__(params=params, **extra) - - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self) -> None: - if self._loaded_plugin_commands: - return - - if sys.version_info >= (3, 10): - from importlib import metadata - else: - # Use a backport on Python < 3.10. We technically have - # importlib.metadata on 3.8+, but the API changed in 3.10, - # so use the backport for consistency. 
- import importlib_metadata as metadata # pyright: ignore - - for ep in metadata.entry_points(group="flask.commands"): - self.add_command(ep.load(), ep.name) - - self._loaded_plugin_commands = True - - def get_command(self, ctx: click.Context, name: str) -> click.Command | None: - self._load_plugin_commands() - # Look up built-in and plugin commands, which should be - # available even if the app fails to load. - rv = super().get_command(ctx, name) - - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - - # Look up commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - app = info.load_app() - except NoAppException as e: - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - return None - - # Push an app context for the loaded app unless it is already - # active somehow. This makes the context available to parameter - # and command callbacks without needing @with_appcontext. - if not current_app or current_app._get_current_object() is not app: # type: ignore[attr-defined] - ctx.with_resource(app.app_context()) - - return app.cli.get_command(ctx, name) - - def list_commands(self, ctx: click.Context) -> list[str]: - self._load_plugin_commands() - # Start with the built-in and plugin commands. - rv = set(super().list_commands(ctx)) - info = ctx.ensure_object(ScriptInfo) - - # Add commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except NoAppException as e: - # When an app couldn't be loaded, show the error message - # without the traceback. - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - except Exception: - # When any other errors occurred during loading, show the - # full traceback. - click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") - - return sorted(rv) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: click.Context | None = None, - **extra: t.Any, - ) -> click.Context: - # Set a flag to tell app.run to become a no-op. If app.run was - # not in a __name__ == __main__ guard, it would start the server - # when importing, blocking whatever command is being called. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - if "obj" not in extra and "obj" not in self.context_settings: - extra["obj"] = ScriptInfo( - create_app=self.create_app, - set_debug_flag=self.set_debug_flag, - load_dotenv_defaults=self.load_dotenv, - ) - - return super().make_context(info_name, args, parent=parent, **extra) - - def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: - if (not args and self.no_args_is_help) or ( - len(args) == 1 and args[0] in self.get_help_option_names(ctx) - ): - # Attempt to load --env-file and --app early in case they - # were given as env vars. Otherwise no_args_is_help will not - # see commands from app.cli. - _env_file_option.handle_parse_result(ctx, {}, []) - _app_option.handle_parse_result(ctx, {}, []) - - return super().parse_args(ctx, args) - - -def _path_is_ancestor(path: str, other: str) -> bool: - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv( - path: str | os.PathLike[str] | None = None, load_defaults: bool = True -) -> bool: - """Load "dotenv" files to set environment variables. 
A given path takes - precedence over ``.env``, which takes precedence over ``.flaskenv``. After - loading and combining these files, values are only set if the key is not - already set in ``os.environ``. - - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location. - :param load_defaults: Search for and load the default ``.flaskenv`` and - ``.env`` files. - :return: ``True`` if at least one env var was loaded. - - .. versionchanged:: 3.1 - Added the ``load_defaults`` parameter. A given path takes precedence - over default files. - - .. versionchanged:: 2.0 - The current directory is not changed to the location of the - loaded file. - - .. versionchanged:: 2.0 - When loading the env files, set the default encoding to UTF-8. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionadded:: 1.0 - """ - try: - import dotenv - except ImportError: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env files present. Install python-dotenv" - " to use them.", - fg="yellow", - err=True, - ) - - return False - - data: dict[str, str | None] = {} - - if load_defaults: - for default_name in (".flaskenv", ".env"): - if not (default_path := dotenv.find_dotenv(default_name, usecwd=True)): - continue - - data |= dotenv.dotenv_values(default_path, encoding="utf-8") - - if path is not None and os.path.isfile(path): - data |= dotenv.dotenv_values(path, encoding="utf-8") - - for key, value in data.items(): - if key in os.environ or value is None: - continue - - os.environ[key] = value - - return bool(data) # True if at least one env var was loaded. - - -def show_server_banner(debug: bool, app_import_path: str | None) -> None: - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if is_running_from_reloader(): - return - - if app_import_path is not None: - click.echo(f" * Serving Flask app '{app_import_path}'") - - if debug is not None: - click.echo(f" * Debug mode: {'on' if debug else 'off'}") - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. - """ - - name = "path" - - def __init__(self) -> None: - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - try: - import ssl - except ImportError: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) from None - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import cryptography # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires the cryptography library.", - ctx, - param, - ) from None - - return value - - obj = import_string(value, silent=True) - - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx: click.Context, param: click.Parameter, value: t.Any) -> t.Any: - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. 
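``load_dotenv`` above can also be called directly. A small sketch of the documented precedence, assuming an extra ``.env.local`` file that is purely illustrative; variables already present in ``os.environ`` always win, and the explicit path beats the default ``.env``/``.flaskenv`` files::

    from flask.cli import load_dotenv

    # Returns False when python-dotenv is not installed or no files were found.
    loaded = load_dotenv(".env.local", load_defaults=True)
    print("loaded any values:", loaded)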
- """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - - try: - import ssl - except ImportError: - is_context = False - else: - is_context = isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key" is not used.', - ctx, - param, - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. - """ - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - items = self.split_envvar_value(value) - # can't call no-arg super() inside list comprehension until Python 3.12 - super_convert = super().convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", - type=CertParamType(), - help="Specify a certificate file to use HTTPS.", - is_eager=True, -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. Multiple paths" - f" are separated by {os.path.pathsep!r}." - ), -) -@click.option( - "--exclude-patterns", - default=None, - type=SeparatedPathType(), - help=( - "Files matching these fnmatch patterns will not trigger a reload" - " on change. Multiple patterns are separated by" - f" {os.path.pathsep!r}." - ), -) -@pass_script_info -def run_command( - info: ScriptInfo, - host: str, - port: int, - reload: bool, - debugger: bool, - with_threads: bool, - cert: ssl.SSLContext | tuple[str, str | None] | t.Literal["adhoc"] | None, - extra_files: list[str] | None, - exclude_patterns: list[str] | None, -) -> None: - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default with the '--debug' - option. - """ - try: - app: WSGIApplication = info.load_app() # pyright: ignore - except Exception as e: - if is_running_from_reloader(): - # When reloading, print out the error immediately, but raise - # it later so the debugger or server can handle it. 
- traceback.print_exc() - err = e - - def app( - environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - raise err from None - - else: - # When not reloading, raise the error immediately so the - # command fails. - raise e from None - - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - show_server_banner(debug, info.app_import_path) - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - exclude_patterns=exclude_patterns, - ) - - -run_command.params.insert(0, _debug_option) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command() -> None: - """Run an interactive Python shell in the context of a given - Flask application. The application will populate the default - namespace of this shell according to its configuration. - - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - - banner = ( - f"Python {sys.version} on {sys.platform}\n" - f"App: {current_app.import_name}\n" - f"Instance: {current_app.instance_path}" - ) - ctx: dict[str, t.Any] = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup) as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(current_app.make_shell_context()) - - # Site, customize, or startup script can set a hook to call when - # entering interactive mode. The default one sets up readline with - # tab and history completion. - interactive_hook = getattr(sys, "__interactivehook__", None) - - if interactive_hook is not None: - try: - import readline - from rlcompleter import Completer - except ImportError: - pass - else: - # rlcompleter uses __main__.__dict__ by default, which is - # flask.__main__. Use the shell context instead. - readline.set_completer(Completer(ctx).complete) - - interactive_hook() - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "domain", "rule", "match")), - default="endpoint", - help=( - "Method to sort routes by. 'match' is the order that Flask will match routes" - " when dispatching a request." 
- ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort: str, all_methods: bool) -> None: - """Show all registered routes with endpoints and methods.""" - rules = list(current_app.url_map.iter_rules()) - - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set() if all_methods else {"HEAD", "OPTIONS"} - host_matching = current_app.url_map.host_matching - has_domain = any(rule.host if host_matching else rule.subdomain for rule in rules) - rows = [] - - for rule in rules: - row = [ - rule.endpoint, - ", ".join(sorted((rule.methods or set()) - ignored_methods)), - ] - - if has_domain: - row.append((rule.host if host_matching else rule.subdomain) or "") - - row.append(rule.rule) - rows.append(row) - - headers = ["Endpoint", "Methods"] - sorts = ["endpoint", "methods"] - - if has_domain: - headers.append("Host" if host_matching else "Subdomain") - sorts.append("domain") - - headers.append("Rule") - sorts.append("rule") - - try: - rows.sort(key=itemgetter(sorts.index(sort))) - except ValueError: - pass - - rows.insert(0, headers) - widths = [max(len(row[i]) for row in rows) for i in range(len(headers))] - rows.insert(1, ["-" * w for w in widths]) - template = " ".join(f"{{{i}:<{w}}}" for i, w in enumerate(widths)) - - for row in rows: - click.echo(template.format(*row)) - - -cli = FlaskGroup( - name="flask", - help="""\ -A general utility script for Flask applications. - -An application to load must be given with the '--app' option, -'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file -in the current directory. -""", -) - - -def main() -> None: - cli.main() - - -if __name__ == "__main__": - main() diff --git a/.venv/lib/python3.12/site-packages/flask/config.py b/.venv/lib/python3.12/site-packages/flask/config.py deleted file mode 100644 index 34ef1a5..0000000 --- a/.venv/lib/python3.12/site-packages/flask/config.py +++ /dev/null @@ -1,367 +0,0 @@ -from __future__ import annotations - -import errno -import json -import os -import types -import typing as t - -from werkzeug.utils import import_string - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .sansio.app import App - - -T = t.TypeVar("T") - - -class ConfigAttribute(t.Generic[T]): - """Makes an attribute forward to the config""" - - def __init__( - self, name: str, get_converter: t.Callable[[t.Any], T] | None = None - ) -> None: - self.__name__ = name - self.get_converter = get_converter - - @t.overload - def __get__(self, obj: None, owner: None) -> te.Self: ... - - @t.overload - def __get__(self, obj: App, owner: type[App]) -> T: ... - - def __get__(self, obj: App | None, owner: type[App] | None = None) -> T | te.Self: - if obj is None: - return self - - rv = obj.config[self.__name__] - - if self.get_converter is not None: - rv = self.get_converter(rv) - - return rv # type: ignore[no-any-return] - - def __set__(self, obj: App, value: t.Any) -> None: - obj.config[self.__name__] = value - - -class Config(dict): # type: ignore[type-arg] - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. 
It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__( - self, - root_path: str | os.PathLike[str], - defaults: dict[str, t.Any] | None = None, - ) -> None: - super().__init__(defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name: str, silent: bool = False) -> bool: - """Loads a configuration from an environment variable pointing to - a configuration file. This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - f"The environment variable {variable_name!r} is not set" - " and as such configuration could not be loaded. Set" - " this variable and make it point to a configuration" - " file" - ) - return self.from_pyfile(rv, silent=silent) - - def from_prefixed_env( - self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads - ) -> bool: - """Load any environment variables that start with ``FLASK_``, - dropping the prefix from the env key for the config key. Values - are passed through a loading function to attempt to convert them - to more specific types than strings. - - Keys are loaded in :func:`sorted` order. - - The default loading function attempts to parse values as any - valid JSON type, including dicts and lists. - - Specific items in nested dicts can be set by separating the - keys with double underscores (``__``). If an intermediate key - doesn't exist, it will be initialized to an empty dict. - - :param prefix: Load env vars that start with this prefix, - separated with an underscore (``_``). - :param loads: Pass each string value to this function and use - the returned value as the config value. If any error is - raised it is ignored and the value remains a string. The - default is :func:`json.loads`. - - .. 
versionadded:: 2.1 - """ - prefix = f"{prefix}_" - - for key in sorted(os.environ): - if not key.startswith(prefix): - continue - - value = os.environ[key] - key = key.removeprefix(prefix) - - try: - value = loads(value) - except Exception: - # Keep the value as a string if loading failed. - pass - - if "__" not in key: - # A non-nested key, set directly. - self[key] = value - continue - - # Traverse nested dictionaries with keys separated by "__". - current = self - *parts, tail = key.split("__") - - for part in parts: - # If an intermediate dict does not exist, create it. - if part not in current: - current[part] = {} - - current = current[part] - - current[tail] = value - - return True - - def from_pyfile( - self, filename: str | os.PathLike[str], silent: bool = False - ) -> bool: - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - self.from_object(d) - return True - - def from_object(self, obj: object | str) -> None: - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, str): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_file( - self, - filename: str | os.PathLike[str], - load: t.Callable[[t.IO[t.Any]], t.Mapping[str, t.Any]], - silent: bool = False, - text: bool = True, - ) -> bool: - """Update the values in the config from a file that is loaded - using the ``load`` parameter. The loaded data is passed to the - :meth:`from_mapping` method. - - .. 
code-block:: python - - import json - app.config.from_file("config.json", load=json.load) - - import tomllib - app.config.from_file("config.toml", load=tomllib.load, text=False) - - :param filename: The path to the data file. This can be an - absolute path or relative to the config root path. - :param load: A callable that takes a file handle and returns a - mapping of loaded data from the file. - :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` - implements a ``read`` method. - :param silent: Ignore the file if it doesn't exist. - :param text: Open the file in text or binary mode. - :return: ``True`` if the file was loaded successfully. - - .. versionchanged:: 2.3 - The ``text`` parameter was added. - - .. versionadded:: 2.0 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename, "r" if text else "rb") as f: - obj = load(f) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - - return self.from_mapping(obj) - - def from_mapping( - self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any - ) -> bool: - """Updates the config like :meth:`update` ignoring items with - non-upper keys. - - :return: Always returns ``True``. - - .. versionadded:: 0.11 - """ - mappings: dict[str, t.Any] = {} - if mapping is not None: - mappings.update(mapping) - mappings.update(kwargs) - for key, value in mappings.items(): - if key.isupper(): - self[key] = value - return True - - def get_namespace( - self, namespace: str, lowercase: bool = True, trim_namespace: bool = True - ) -> dict[str, t.Any]: - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. versionadded:: 0.11 - """ - rv = {} - for k, v in self.items(): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self) -> str: - return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/.venv/lib/python3.12/site-packages/flask/ctx.py b/.venv/lib/python3.12/site-packages/flask/ctx.py deleted file mode 100644 index 222e818..0000000 --- a/.venv/lib/python3.12/site-packages/flask/ctx.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations - -import contextvars -import sys -import typing as t -from functools import update_wrapper -from types import TracebackType - -from werkzeug.exceptions import HTTPException - -from . 
import typing as ft -from .globals import _cv_app -from .globals import _cv_request -from .signals import appcontext_popped -from .signals import appcontext_pushed - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - from .sessions import SessionMixin - from .wrappers import Request - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals: - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - # Define attr methods to let mypy know this is a namespace object - # that has arbitrary attributes. - - def __getattr__(self, name: str) -> t.Any: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - self.__dict__[name] = value - - def __delattr__(self, name: str) -> None: - try: - del self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def get(self, name: str, default: t.Any | None = None) -> t.Any: - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. - - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raising a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name: str, default: t.Any = None) -> t.Any: - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param default: Value to set and return if the attribute is not - present. - - .. versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item: str) -> bool: - return item in self.__dict__ - - def __iter__(self) -> t.Iterator[str]: - return iter(self.__dict__) - - def __repr__(self) -> str: - ctx = _cv_app.get(None) - if ctx is not None: - return f"" - return object.__repr__(self) - - -def after_this_request( - f: ft.AfterRequestCallable[t.Any], -) -> ft.AfterRequestCallable[t.Any]: - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. 
versionadded:: 0.9 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'after_this_request' can only be used when a request" - " context is active, such as in a view function." - ) - - ctx._after_request_functions.append(f) - return f - - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def copy_current_request_context(f: F) -> F: - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'copy_current_request_context' can only be used when a" - " request context is active, such as in a view function." - ) - - ctx = ctx.copy() - - def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any: - with ctx: - return ctx.app.ensure_sync(f)(*args, **kwargs) - - return update_wrapper(wrapper, f) # type: ignore[return-value] - - -def has_request_context() -> bool: - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _cv_request.get(None) is not None - - -def has_app_context() -> bool: - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _cv_app.get(None) is not None - - -class AppContext: - """The app context contains application-specific information. An app - context is created and pushed at the beginning of each request if - one is not already active. An app context is also pushed when - running CLI commands. 
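The ``g`` object created for each application context is the ``_AppCtxGlobals`` namespace described earlier, so it supports the dict-like helpers as well as plain attribute access. A minimal sketch, assuming an example app named ``app``::

    from flask import Flask, g

    app = Flask(__name__)  # hypothetical example app

    with app.app_context():
        g.db_handle = object()             # plain attribute storage
        assert "db_handle" in g            # __contains__
        g.setdefault("cache", {})          # like dict.setdefault
        handle = g.pop("db_handle", None)  # like dict.pop, with a default
        print(sorted(iter(g)))             # iterate stored attribute names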
- """ - - def __init__(self, app: Flask) -> None: - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g: _AppCtxGlobals = app.app_ctx_globals_class() - self._cv_tokens: list[contextvars.Token[AppContext]] = [] - - def push(self) -> None: - """Binds the app context to the current context.""" - self._cv_tokens.append(_cv_app.set(self)) - appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync) - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the app context.""" - try: - if len(self._cv_tokens) == 1: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - ctx = _cv_app.get() - _cv_app.reset(self._cv_tokens.pop()) - - if ctx is not self: - raise AssertionError( - f"Popped wrong app context. ({ctx!r} instead of {self!r})" - ) - - appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync) - - def __enter__(self) -> AppContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - -class RequestContext: - """The request context contains per-request information. The Flask - app creates and pushes it at the beginning of the request, then pops - it at the end of the request. It will create the URL adapter and - request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the - request. When using the interactive debugger, the context will be - restored so ``request`` is still accessible. Similarly, the test - client can preserve the context after the request ends. However, - teardown functions may already have closed some resources such as - database connections. - """ - - def __init__( - self, - app: Flask, - environ: WSGIEnvironment, - request: Request | None = None, - session: SessionMixin | None = None, - ) -> None: - self.app = app - if request is None: - request = app.request_class(environ) - request.json_module = app.json - self.request: Request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes: list[tuple[str, str]] | None = None - self.session: SessionMixin | None = session - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions: list[ft.AfterRequestCallable[t.Any]] = [] - - self._cv_tokens: list[ - tuple[contextvars.Token[RequestContext], AppContext | None] - ] = [] - - def copy(self) -> RequestContext: - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. 
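Both context classes implement ``__enter__``/``__exit__``, so they are normally driven with ``with`` via ``app.app_context()`` and ``app.test_request_context()``. A hedged sketch (app and URL invented) showing that pushing a request context also pushes a matching app context, as ``RequestContext.push`` below arranges::

    from flask import Flask, current_app, has_app_context, has_request_context, request

    app = Flask(__name__)
    assert not has_app_context() and not has_request_context()

    with app.test_request_context("/hello?name=World"):
        # RequestContext.push() created and pushed an AppContext for us.
        assert has_app_context() and has_request_context()
        assert current_app.name == app.name
        assert request.path == "/hello"
        assert request.args["name"] == "World"

    # Both contexts are popped again once the block exits.
    assert not has_app_context() and not has_request_context()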
This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self) -> None: - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) # type: ignore - self.request.url_rule, self.request.view_args = result # type: ignore - except HTTPException as e: - self.request.routing_exception = e - - def push(self) -> None: - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _cv_app.get(None) - - if app_ctx is None or app_ctx.app is not self.app: - app_ctx = self.app.app_context() - app_ctx.push() - else: - app_ctx = None - - self._cv_tokens.append((_cv_request.set(self), app_ctx)) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. - if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - # Match the request URL after loading the session, so that the - # session is available in custom URL converters. - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - clear_request = len(self._cv_tokens) == 1 - - try: - if clear_request: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - finally: - ctx = _cv_request.get() - token, app_ctx = self._cv_tokens.pop() - _cv_request.reset(token) - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - ctx.request.environ["werkzeug.request"] = None - - if app_ctx is not None: - app_ctx.pop(exc) - - if ctx is not self: - raise AssertionError( - f"Popped wrong request context. 
({ctx!r} instead of {self!r})" - ) - - def __enter__(self) -> RequestContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - def __repr__(self) -> str: - return ( - f"<{type(self).__name__} {self.request.url!r}" - f" [{self.request.method}] of {self.app.name}>" - ) diff --git a/.venv/lib/python3.12/site-packages/flask/debughelpers.py b/.venv/lib/python3.12/site-packages/flask/debughelpers.py deleted file mode 100644 index 2c8c4c4..0000000 --- a/.venv/lib/python3.12/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2.loaders import BaseLoader -from werkzeug.routing import RequestRedirect - -from .blueprints import Blueprint -from .globals import request_ctx -from .sansio.app import App - -if t.TYPE_CHECKING: - from .sansio.scaffold import Scaffold - from .wrappers import Request - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request: Request, key: str) -> None: - form_matches = request.form.getlist(key) - buf = [ - f"You tried to access the file {key!r} in the request.files" - " dictionary but it does not exist. The mimetype for the" - f" request is {request.mimetype!r} instead of" - " 'multipart/form-data' which means that no file contents" - " were transmitted. To fix this error you should provide" - ' enctype="multipart/form-data" in your form.' - ] - if form_matches: - names = ", ".join(repr(x) for x in form_matches) - buf.append( - "\n\nThe browser instead transmitted some file names. " - f"This was submitted: {names}" - ) - self.msg = "".join(buf) - - def __str__(self) -> str: - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised in debug mode if a routing redirect - would cause the browser to drop the method or body. This happens - when method is not GET, HEAD or OPTIONS and the status code is not - 307 or 308. - """ - - def __init__(self, request: Request) -> None: - exc = request.routing_exception - assert isinstance(exc, RequestRedirect) - buf = [ - f"A request was sent to '{request.url}', but routing issued" - f" a redirect to the canonical URL '{exc.new_url}'." - ] - - if f"{request.base_url}/" == exc.new_url.partition("?")[0]: - buf.append( - " The URL was defined with a trailing slash. Flask" - " will redirect to the URL with a trailing slash if it" - " was accessed without one." - ) - - buf.append( - " Send requests to the canonical URL, or use 307 or 308 for" - " routing redirects. Otherwise, browsers will drop form" - " data.\n\n" - "This exception is only raised in debug mode." - ) - super().__init__("".join(buf)) - - -def attach_enctype_error_multidict(request: Request) -> None: - """Patch ``request.files.__getitem__`` to raise a descriptive error - about ``enctype=multipart/form-data``. - - :param request: The request to patch. 
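``DebugFilesKeyError`` above only fires when a form posts without ``enctype="multipart/form-data"``. A hedged sketch of the view side of a working upload (the route, field name, and target path are invented)::

    from flask import Flask, request
    from werkzeug.utils import secure_filename

    app = Flask(__name__)

    @app.post("/upload")
    def upload():
        # The HTML form must declare enctype="multipart/form-data";
        # otherwise request.files stays empty and debug mode raises the
        # DebugFilesKeyError explained above.
        file = request.files["document"]            # werkzeug FileStorage
        name = secure_filename(file.filename or "upload")
        file.save(f"/tmp/{name}")                   # illustrative path only
        return {"saved": name}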
- :meta private: - """ - oldcls = request.files.__class__ - - class newcls(oldcls): # type: ignore[valid-type, misc] - def __getitem__(self, key: str) -> t.Any: - try: - return super().__getitem__(key) - except KeyError as e: - if key not in request.form: - raise - - raise DebugFilesKeyError(request, key).with_traceback( - e.__traceback__ - ) from None - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader: BaseLoader) -> t.Iterator[str]: - yield f"class: {type(loader).__module__}.{type(loader).__name__}" - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, str) for x in value): - continue - yield f"{key}:" - for item in value: - yield f" - {item}" - continue - elif not isinstance(value, (str, int, float, bool)): - continue - yield f"{key}: {value!r}" - - -def explain_template_loading_attempts( - app: App, - template: str, - attempts: list[ - tuple[ - BaseLoader, - Scaffold, - tuple[str, str | None, t.Callable[[], bool] | None] | None, - ] - ], -) -> None: - """This should help developers understand what failed""" - info = [f"Locating template {template!r}:"] - total_found = 0 - blueprint = None - if request_ctx and request_ctx.request.blueprint is not None: - blueprint = request_ctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, App): - src_info = f"application {srcobj.import_name!r}" - elif isinstance(srcobj, Blueprint): - src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" - else: - src_info = repr(srcobj) - - info.append(f"{idx + 1:5}: trying loader of {src_info}") - - for line in _dump_loader_info(loader): - info.append(f" {line}") - - if triple is None: - detail = "no match" - else: - detail = f"found ({triple[1] or ''!r})" - total_found += 1 - info.append(f" -> {detail}") - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not None and seems_fishy: - info.append( - " The template was looked up from an endpoint that belongs" - f" to the blueprint {blueprint!r}." - ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See https://flask.palletsprojects.com/blueprints/#templates") - - app.logger.info("\n".join(info)) diff --git a/.venv/lib/python3.12/site-packages/flask/globals.py b/.venv/lib/python3.12/site-packages/flask/globals.py deleted file mode 100644 index e2c410c..0000000 --- a/.venv/lib/python3.12/site-packages/flask/globals.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import typing as t -from contextvars import ContextVar - -from werkzeug.local import LocalProxy - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .ctx import _AppCtxGlobals - from .ctx import AppContext - from .ctx import RequestContext - from .sessions import SessionMixin - from .wrappers import Request - - -_no_app_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -the current application. To solve this, set up an application context -with app.app_context(). 
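``explain_template_loading_attempts`` above usually points at a blueprint whose ``template_folder`` does not contain the requested file. A hedged sketch of the layout it inspects (blueprint name, folders, and template path are invented; the report is emitted when ``EXPLAIN_TEMPLATE_LOADING`` is enabled)::

    from flask import Blueprint, Flask, render_template

    # templates/ next to the application package feeds the app loader;
    # admin/templates/ feeds the blueprint loader.
    admin = Blueprint("admin", __name__, template_folder="templates")

    @admin.route("/dashboard")
    def dashboard():
        # Looked up first in the application's templates/ folder, then in each
        # registered blueprint's template_folder; each attempt is listed in the
        # log output produced by explain_template_loading_attempts().
        return render_template("admin/dashboard.html")

    app = Flask(__name__)
    app.config["EXPLAIN_TEMPLATE_LOADING"] = True
    app.register_blueprint(admin, url_prefix="/admin")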
See the documentation for more information.\ -""" -_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx") -app_ctx: AppContext = LocalProxy( # type: ignore[assignment] - _cv_app, unbound_message=_no_app_msg -) -current_app: Flask = LocalProxy( # type: ignore[assignment] - _cv_app, "app", unbound_message=_no_app_msg -) -g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment] - _cv_app, "g", unbound_message=_no_app_msg -) - -_no_req_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx") -request_ctx: RequestContext = LocalProxy( # type: ignore[assignment] - _cv_request, unbound_message=_no_req_msg -) -request: Request = LocalProxy( # type: ignore[assignment] - _cv_request, "request", unbound_message=_no_req_msg -) -session: SessionMixin = LocalProxy( # type: ignore[assignment] - _cv_request, "session", unbound_message=_no_req_msg -) diff --git a/.venv/lib/python3.12/site-packages/flask/helpers.py b/.venv/lib/python3.12/site-packages/flask/helpers.py deleted file mode 100644 index 5d412c9..0000000 --- a/.venv/lib/python3.12/site-packages/flask/helpers.py +++ /dev/null @@ -1,641 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import sys -import typing as t -from datetime import datetime -from functools import cache -from functools import update_wrapper - -import werkzeug.utils -from werkzeug.exceptions import abort as _wz_abort -from werkzeug.utils import redirect as _wz_redirect -from werkzeug.wrappers import Response as BaseResponse - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .globals import request_ctx -from .globals import session -from .signals import message_flashed - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -def get_debug_flag() -> bool: - """Get whether debug mode should be enabled for the app, indicated by the - :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. - """ - val = os.environ.get("FLASK_DEBUG") - return bool(val and val.lower() not in {"0", "false", "no"}) - - -def get_load_dotenv(default: bool = True) -> bool: - """Get whether the user has disabled loading default dotenv files by - setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load - the files. - - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -@t.overload -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr], -) -> t.Iterator[t.AnyStr]: ... - - -@t.overload -def stream_with_context( - generator_or_function: t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Callable[[t.Iterator[t.AnyStr]], t.Iterator[t.AnyStr]]: ... - - -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Iterator[t.AnyStr] | t.Callable[[t.Iterator[t.AnyStr]], t.Iterator[t.AnyStr]]: - """Wrap a response generator function so that it runs inside the current - request context. This keeps :data:`request`, :data:`session`, and :data:`g` - available, even though at the point the generator runs the request context - will typically have ended. 
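``get_debug_flag`` and ``get_load_dotenv`` above share the same small vocabulary: ``0``, ``false``, and ``no`` are the only falsy spellings. A quick sketch of the semantics (importing the helpers directly from ``flask.helpers`` is an assumption, not documented API)::

    import os
    from flask.helpers import get_debug_flag, get_load_dotenv

    os.environ["FLASK_DEBUG"] = "1"
    assert get_debug_flag() is True

    os.environ["FLASK_DEBUG"] = "no"            # "0", "false", "no" disable it
    assert get_debug_flag() is False

    os.environ.pop("FLASK_SKIP_DOTENV", None)
    assert get_load_dotenv() is True            # unset: keep the default

    os.environ["FLASK_SKIP_DOTENV"] = "1"       # anything else skips dotenv loading
    assert get_load_dotenv() is False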
- - Use it as a decorator on a generator function: - - .. code-block:: python - - from flask import stream_with_context, request, Response - - @app.get("/stream") - def streamed_response(): - @stream_with_context - def generate(): - yield "Hello " - yield request.args["name"] - yield "!" - - return Response(generate()) - - Or use it as a wrapper around a created generator: - - .. code-block:: python - - from flask import stream_with_context, request, Response - - @app.get("/stream") - def streamed_response(): - def generate(): - yield "Hello " - yield request.args["name"] - yield "!" - - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) # type: ignore[arg-type] - except TypeError: - - def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: - gen = generator_or_function(*args, **kwargs) # type: ignore[operator] - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) # type: ignore[arg-type] - - def generator() -> t.Iterator[t.AnyStr]: - if (req_ctx := _cv_request.get(None)) is None: - raise RuntimeError( - "'stream_with_context' can only be used when a request" - " context is active, such as in a view function." - ) - - app_ctx = _cv_app.get() - # Setup code below will run the generator to this point, so that the - # current contexts are recorded. The contexts must be pushed after, - # otherwise their ContextVar will record the wrong event loop during - # async view functions. - yield None # type: ignore[misc] - - # Push the app context first, so that the request context does not - # automatically create and push a different app context. - with app_ctx, req_ctx: - try: - yield from gen - finally: - # Clean up in case the user wrapped a WSGI iterator. - if hasattr(gen, "close"): - gen.close() - - # Execute the generator to the sentinel value. This ensures the context is - # preserved in the generator's state. Further iteration will push the - # context and yield from the original iterator. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g - - -def make_response(*args: t.Any) -> Response: - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. 
- - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for( - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, -) -> str: - """Generate a URL to the given endpoint with the given values. - - This requires an active request or application context, and calls - :meth:`current_app.url_for() `. See that method - for full documentation. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it is - external. - :param _external: If given, prefer the URL to be internal (False) or - require it to be external (True). External URLs include the - scheme and domain. When not in an active request, URLs are - external by default. - :param values: Values to use for the variable parts of the URL rule. - Unknown keys are appended as query string arguments, like - ``?a=b&c=d``. - - .. versionchanged:: 2.2 - Calls ``current_app.url_for``, allowing an app to override the - behavior. - - .. versionchanged:: 0.10 - The ``_scheme`` parameter was added. - - .. versionchanged:: 0.9 - The ``_anchor`` and ``_method`` parameters were added. - - .. versionchanged:: 0.9 - Calls ``app.handle_url_build_error`` on build errors. - """ - return current_app.url_for( - endpoint, - _anchor=_anchor, - _method=_method, - _scheme=_scheme, - _external=_external, - **values, - ) - - -def redirect( - location: str, code: int = 302, Response: type[BaseResponse] | None = None -) -> BaseResponse: - """Create a redirect response object. - - If :data:`~flask.current_app` is available, it will use its - :meth:`~flask.Flask.redirect` method, otherwise it will use - :func:`werkzeug.utils.redirect`. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - :param Response: The response class to use. Not used when - ``current_app`` is active, which uses ``app.response_class``. - - .. versionadded:: 2.2 - Calls ``current_app.redirect`` if available instead of always - using Werkzeug's default ``redirect``. - """ - if current_app: - return current_app.redirect(location, code=code) - - return _wz_redirect(location, code=code, Response=Response) - - -def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given - status code. - - If :data:`~flask.current_app` is available, it will call its - :attr:`~flask.Flask.aborter` object, otherwise it will use - :func:`werkzeug.exceptions.abort`. - - :param code: The status code for the exception, which must be - registered in ``app.aborter``. - :param args: Passed to the exception. - :param kwargs: Passed to the exception. - - .. versionadded:: 2.2 - Calls ``current_app.aborter`` if available instead of always - using Werkzeug's default ``abort``. 
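``url_for``, ``redirect``, and ``abort`` above are usually combined inside view functions. A hedged sketch (the endpoints, rules, and the 100-post limit are invented)::

    from flask import Flask, abort, redirect, url_for

    app = Flask(__name__)

    @app.route("/post/<int:post_id>")
    def show_post(post_id):
        if post_id > 100:
            abort(404)                      # raises werkzeug's NotFound
        return f"post {post_id}"

    @app.route("/latest")
    def latest():
        # Unknown keyword arguments become the query string: /post/42?ref=latest
        return redirect(url_for("show_post", post_id=42, ref="latest"))

    with app.test_request_context():
        assert url_for("show_post", post_id=7) == "/post/7"
        assert url_for("show_post", post_id=7, _external=True).startswith("http://")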
- """ - if current_app: - current_app.aborter(code, *args, **kwargs) - - _wz_abort(code, *args, **kwargs) - - -def get_template_attribute(template_name: str, attribute: str) -> t.Any: - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message: str, category: str = "message") -> None: - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - app = current_app._get_current_object() # type: ignore - message_flashed.send( - app, - _async_wrapper=app.ensure_sync, - message=message, - category=category, - ) - - -def get_flashed_messages( - with_categories: bool = False, category_filter: t.Iterable[str] = () -) -> list[str] | list[tuple[str, str]]: - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. - - See :doc:`/patterns/flashing` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: filter of categories to limit return values. Only - categories in the list will be returned. 
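A hedged sketch tying ``flash`` to ``get_flashed_messages`` with categories; the secret key and messages are placeholders, and a real application would normally render them from a template rather than asserting on them::

    from flask import Flask, flash, get_flashed_messages

    app = Flask(__name__)
    app.secret_key = "dev-only-not-secret"       # flashing stores data in the session

    with app.test_request_context():
        flash("Profile saved.")                               # default category "message"
        flash("Quota almost used up.", category="warning")

        assert get_flashed_messages(with_categories=True) == [
            ("message", "Profile saved."),
            ("warning", "Quota almost used up."),
        ]
        # Later calls in the same request reuse the cached list; filter as needed.
        assert get_flashed_messages(category_filter=["warning"]) == [
            "Quota almost used up.",
        ]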
- """ - flashes = request_ctx.flashes - if flashes is None: - flashes = session.pop("_flashes") if "_flashes" in session else [] - request_ctx.flashes = flashes - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]: - if kwargs.get("max_age") is None: - kwargs["max_age"] = current_app.get_send_file_max_age - - kwargs.update( - environ=request.environ, - use_x_sendfile=current_app.config["USE_X_SENDFILE"], - response_class=current_app.response_class, - _root_path=current_app.root_path, - ) - return kwargs - - -def send_file( - path_or_file: os.PathLike[t.AnyStr] | str | t.IO[bytes], - mimetype: str | None = None, - as_attachment: bool = False, - download_name: str | None = None, - conditional: bool = True, - etag: bool | str = True, - last_modified: datetime | int | float | None = None, - max_age: None | (int | t.Callable[[str | None], int | None]) = None, -) -> Response: - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. Use :func:`send_from_directory` to safely serve - user-requested paths from within a directory. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, configuring Flask with - ``USE_X_SENDFILE = True`` will tell the server to send the given - path, which is much more efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. Make - sure the file pointer is seeked to the start of the data. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. - - .. versionchanged:: 2.0 - ``download_name`` replaces the ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces the ``cache_timeout`` parameter. 
- ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces the ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - Passing a file-like object that inherits from - :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather - than sending an empty file. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionchanged:: 1.1 - ``filename`` may be a :class:`~os.PathLike` object. - - .. versionchanged:: 1.1 - Passing a :class:`~io.BytesIO` object supports range requests. - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.0 - UTF-8 filenames as specified in :rfc:`2231` are supported. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file - objects. If you want to use automatic MIME and etag support, - pass a filename via ``filename_or_fp`` or - ``attachment_filename``. - - .. versionchanged:: 0.12 - ``attachment_filename`` is preferred over ``filename`` for MIME - detection. - - .. versionchanged:: 0.9 - ``cache_timeout`` defaults to - :meth:`Flask.get_send_file_max_age`. - - .. versionchanged:: 0.7 - MIME guessing and etag support for file-like objects was - removed because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. - - .. versionchanged:: 0.5 - The ``add_etags``, ``cache_timeout`` and ``conditional`` - parameters were added. The default behavior is to add etags. - - .. versionadded:: 0.2 - """ - return werkzeug.utils.send_file( # type: ignore[return-value] - **_prepare_send_file_kwargs( - path_or_file=path_or_file, - environ=request.environ, - mimetype=mimetype, - as_attachment=as_attachment, - download_name=download_name, - conditional=conditional, - etag=etag, - last_modified=last_modified, - max_age=max_age, - ) - ) - - -def send_from_directory( - directory: os.PathLike[str] | str, - path: os.PathLike[str] | str, - **kwargs: t.Any, -) -> Response: - """Send a file from within a directory using :func:`send_file`. - - .. code-block:: python - - @app.route("/uploads/") - def download_file(name): - return send_from_directory( - app.config['UPLOAD_FOLDER'], name, as_attachment=True - ) - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under, - relative to the current application's root path. This *must not* - be a value provided by the client, otherwise it becomes insecure. - :param path: The path to the file to send, relative to - ``directory``. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. versionchanged:: 2.0 - ``path`` replaces the ``filename`` parameter. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. 
versionadded:: 0.5 - """ - return werkzeug.utils.send_from_directory( # type: ignore[return-value] - directory, path, **_prepare_send_file_kwargs(**kwargs) - ) - - -def get_root_path(import_name: str) -> str: - """Find the root path of a package, or the path that contains a - module. If it cannot be found, returns the current working - directory. - - Not to be confused with the value returned by :func:`find_package`. - - :meta private: - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - - if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - try: - spec = importlib.util.find_spec(import_name) - - if spec is None: - raise ValueError - except (ImportError, ValueError): - loader = None - else: - loader = spec.loader - - # Loader does not exist or we're referring to an unloaded main - # module or a main module without path (interactive sessions), go - # with the current working directory. - if loader is None: - return os.getcwd() - - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) # pyright: ignore - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a file path it might be because it is a - # namespace package. In this case pick the root path from the - # first module that is contained in the package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided module" - f" {import_name!r}. This can happen because the module" - " came from an import hook that does not provide file" - " name information or because it's a namespace package." - " In this case the root path needs to be explicitly" - " provided." - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) # type: ignore[no-any-return] - - -@cache -def _split_blueprint_path(name: str) -> list[str]: - out: list[str] = [name] - - if "." in name: - out.extend(_split_blueprint_path(name.rpartition(".")[0])) - - return out diff --git a/.venv/lib/python3.12/site-packages/flask/json/__init__.py b/.venv/lib/python3.12/site-packages/flask/json/__init__.py deleted file mode 100644 index c0941d0..0000000 --- a/.venv/lib/python3.12/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - -from ..globals import current_app -from .provider import _default - -if t.TYPE_CHECKING: # pragma: no cover - from ..wrappers import Response - - -def dumps(obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dumps() ` - method, otherwise it will use :func:`json.dumps`. - - :param obj: The data to serialize. - :param kwargs: Arguments passed to the ``dumps`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dumps``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. 
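A hedged sketch of the two file helpers above: streaming an in-memory buffer with ``send_file`` and serving user-named files with ``send_from_directory`` (the routes, the CSV payload, and the ``uploads`` folder are invented)::

    import io
    from flask import Flask, send_file, send_from_directory

    app = Flask(__name__)

    @app.route("/report.csv")
    def report():
        buf = io.BytesIO(b"id,total\n1,42\n")    # file-like objects must be binary
        return send_file(
            buf,
            mimetype="text/csv",
            as_attachment=True,
            download_name="report.csv",          # name offered to the browser
        )

    @app.route("/uploads/<path:name>")
    def uploaded(name):
        # safe_join() inside rejects paths that escape the uploads directory.
        return send_from_directory("uploads", name, as_attachment=True)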
- """ - if current_app: - return current_app.json.dumps(obj, **kwargs) - - kwargs.setdefault("default", _default) - return _json.dumps(obj, **kwargs) - - -def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dump() ` - method, otherwise it will use :func:`json.dump`. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: Arguments passed to the ``dump`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dump``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - Writing to a binary file, and the ``encoding`` argument, will be - removed in Flask 2.1. - """ - if current_app: - current_app.json.dump(obj, fp, **kwargs) - else: - kwargs.setdefault("default", _default) - _json.dump(obj, fp, **kwargs) - - -def loads(s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.loads() ` - method, otherwise it will use :func:`json.loads`. - - :param s: Text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``loads`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.loads``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The data must be a - string or UTF-8 bytes. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.loads(s, **kwargs) - - return _json.loads(s, **kwargs) - - -def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.load() ` - method, otherwise it will use :func:`json.load`. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``load`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.load``, allowing an app to override - the behavior. - - .. versionchanged:: 2.2 - The ``app`` parameter will be removed in Flask 2.3. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The file must be text - mode, or binary mode with UTF-8 bytes. - """ - if current_app: - return current_app.json.load(fp, **kwargs) - - return _json.load(fp, **kwargs) - - -def jsonify(*args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. A dict or list returned from a view will be converted to a - JSON response automatically without needing to call this. - - This requires an active request or application context, and calls - :meth:`app.json.response() `. - - In debug mode, the output is formatted with indentation to make it - easier to read. This may also be controlled by the provider. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. 
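The module-level helpers above defer to ``current_app.json`` when an app context is active and fall back to the stdlib ``json`` module otherwise. A quick sketch (values invented)::

    from datetime import date
    from flask import Flask, json, jsonify

    app = Flask(__name__)

    # Without an app context: stdlib json.dumps plus Flask's default() fallback,
    # which turns dates into RFC 822 strings, for example.
    print(json.dumps({"when": date(2024, 1, 1)}))

    with app.app_context():
        # With an app context: the app's DefaultJSONProvider handles it,
        # including its sort_keys and ensure_ascii defaults.
        assert json.dumps({"b": 2, "a": 1}) == '{"a": 1, "b": 2}'

        resp = jsonify(status="ok", items=[1, 2, 3])
        assert resp.mimetype == "application/json"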
- :param kwargs: Treat as a dict to serialize. - - .. versionchanged:: 2.2 - Calls ``current_app.json.response``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 0.11 - Added support for serializing top-level arrays. This was a - security risk in ancient browsers. See :ref:`security-json`. - - .. versionadded:: 0.2 - """ - return current_app.json.response(*args, **kwargs) # type: ignore[return-value] diff --git a/.venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 02eaadb..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/json/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc deleted file mode 100644 index 7affe57..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/json/__pycache__/provider.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc deleted file mode 100644 index f817009..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/json/__pycache__/tag.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/json/provider.py b/.venv/lib/python3.12/site-packages/flask/json/provider.py deleted file mode 100644 index ea7e475..0000000 --- a/.venv/lib/python3.12/site-packages/flask/json/provider.py +++ /dev/null @@ -1,215 +0,0 @@ -from __future__ import annotations - -import dataclasses -import decimal -import json -import typing as t -import uuid -import weakref -from datetime import date - -from werkzeug.http import http_date - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.sansio.response import Response - - from ..sansio.app import App - - -class JSONProvider: - """A standard set of JSON operations for an application. Subclasses - of this can be used to customize JSON behavior or use different - JSON libraries. - - To implement a provider for a specific library, subclass this base - class and implement at least :meth:`dumps` and :meth:`loads`. All - other methods have default implementations. - - To use a different provider, either subclass ``Flask`` and set - :attr:`~flask.Flask.json_provider_class` to a provider class, or set - :attr:`app.json ` to an instance of the class. - - :param app: An application instance. This will be stored as a - :class:`weakref.proxy` on the :attr:`_app` attribute. - - .. versionadded:: 2.2 - """ - - def __init__(self, app: App) -> None: - self._app: App = weakref.proxy(app) - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - :param obj: The data to serialize. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: May be passed to the underlying JSON library. 
- """ - fp.write(self.dumps(obj, **kwargs)) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - :param s: Text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - return self.loads(fp.read(), **kwargs) - - def _prepare_response_obj( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> t.Any: - if args and kwargs: - raise TypeError("app.json.response() takes either args or kwargs, not both") - - if not args and not kwargs: - return None - - if len(args) == 1: - return args[0] - - return args or kwargs - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. - - The :func:`~flask.json.jsonify` function calls this method for - the current application. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - return self._app.response_class(self.dumps(obj), mimetype="application/json") - - -def _default(o: t.Any) -> t.Any: - if isinstance(o, date): - return http_date(o) - - if isinstance(o, (decimal.Decimal, uuid.UUID)): - return str(o) - - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) # type: ignore[arg-type] - - if hasattr(o, "__html__"): - return str(o.__html__()) - - raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable") - - -class DefaultJSONProvider(JSONProvider): - """Provide JSON operations using Python's built-in :mod:`json` - library. Serializes the following additional data types: - - - :class:`datetime.datetime` and :class:`datetime.date` are - serialized to :rfc:`822` strings. This is the same as the HTTP - date format. - - :class:`uuid.UUID` is serialized to a string. - - :class:`dataclasses.dataclass` is passed to - :func:`dataclasses.asdict`. - - :class:`~markupsafe.Markup` (or any object with a ``__html__`` - method) will call the ``__html__`` method to get a string. - """ - - default: t.Callable[[t.Any], t.Any] = staticmethod(_default) # type: ignore[assignment] - """Apply this function to any object that :meth:`json.dumps` does - not know how to serialize. It should return a valid JSON type or - raise a ``TypeError``. - """ - - ensure_ascii = True - """Replace non-ASCII characters with escape sequences. This may be - more compatible with some clients, but can be disabled for better - performance and size. - """ - - sort_keys = True - """Sort the keys in any serialized dicts. This may be useful for - some caching situations, but can be disabled for better performance. - When enabled, keys must all be strings, they are not converted - before sorting. - """ - - compact: bool | None = None - """If ``True``, or ``None`` out of debug mode, the :meth:`response` - output will not add indentation, newlines, or spaces. If ``False``, - or ``None`` in debug mode, it will use a non-compact representation. 
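Building on the provider above, a common customisation is to extend ``default()`` for one more type and install the subclass via ``json_provider_class``. A hedged sketch (serialising sets is an invented requirement)::

    import typing as t
    from flask import Flask
    from flask.json.provider import DefaultJSONProvider

    class SetFriendlyProvider(DefaultJSONProvider):
        """DefaultJSONProvider that also serialises sets as sorted lists."""

        @staticmethod
        def default(o: t.Any) -> t.Any:
            if isinstance(o, set):
                return sorted(o)
            # Fall back to the date/Decimal/UUID/dataclass handling above.
            return DefaultJSONProvider.default(o)

    class MyFlask(Flask):
        json_provider_class = SetFriendlyProvider   # picked up when the app is created

    app = MyFlask(__name__)
    assert app.json.dumps({"tags": {"b", "a"}}) == '{"tags": ["a", "b"]}'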
- """ - - mimetype = "application/json" - """The mimetype set in :meth:`response`.""" - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON to a string. - - Keyword arguments are passed to :func:`json.dumps`. Sets some - parameter defaults from the :attr:`default`, - :attr:`ensure_ascii`, and :attr:`sort_keys` attributes. - - :param obj: The data to serialize. - :param kwargs: Passed to :func:`json.dumps`. - """ - kwargs.setdefault("default", self.default) - kwargs.setdefault("ensure_ascii", self.ensure_ascii) - kwargs.setdefault("sort_keys", self.sort_keys) - return json.dumps(obj, **kwargs) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON from a string or bytes. - - :param s: Text or UTF-8 bytes. - :param kwargs: Passed to :func:`json.loads`. - """ - return json.loads(s, **kwargs) - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with it. The response mimetype - will be "application/json" and can be changed with - :attr:`mimetype`. - - If :attr:`compact` is ``False`` or debug mode is enabled, the - output will be formatted to be easier to read. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - dump_args: dict[str, t.Any] = {} - - if (self.compact is None and self._app.debug) or self.compact is False: - dump_args.setdefault("indent", 2) - else: - dump_args.setdefault("separators", (",", ":")) - - return self._app.response_class( - f"{self.dumps(obj, **dump_args)}\n", mimetype=self.mimetype - ) diff --git a/.venv/lib/python3.12/site-packages/flask/json/tag.py b/.venv/lib/python3.12/site-packages/flask/json/tag.py deleted file mode 100644 index 8dc3629..0000000 --- a/.venv/lib/python3.12/site-packages/flask/json/tag.py +++ /dev/null @@ -1,327 +0,0 @@ -""" -Tagged JSON -~~~~~~~~~~~ - -A compact representation for lossless serialization of non-standard JSON -types. :class:`~flask.sessions.SecureCookieSessionInterface` uses this -to serialize the session data, but it may be useful in other places. It -can be extended to support other types. - -.. autoclass:: TaggedJSONSerializer - :members: - -.. autoclass:: JSONTag - :members: - -Let's see an example that adds support for -:class:`~collections.OrderedDict`. Dicts don't have an order in JSON, so -to handle this we will dump the items as a list of ``[key, value]`` -pairs. Subclass :class:`JSONTag` and give it the new key ``' od'`` to -identify the type. The session serializer processes dicts first, so -insert the new tag at the front of the order since ``OrderedDict`` must -be processed before ``dict``. - -.. 
code-block:: python - - from flask.json.tag import JSONTag - - class TagOrderedDict(JSONTag): - __slots__ = ('serializer',) - key = ' od' - - def check(self, value): - return isinstance(value, OrderedDict) - - def to_json(self, value): - return [[k, self.serializer.tag(v)] for k, v in iteritems(value)] - - def to_python(self, value): - return OrderedDict(value) - - app.session_interface.serializer.register(TagOrderedDict, index=0) -""" - -from __future__ import annotations - -import typing as t -from base64 import b64decode -from base64 import b64encode -from datetime import datetime -from uuid import UUID - -from markupsafe import Markup -from werkzeug.http import http_date -from werkzeug.http import parse_date - -from ..json import dumps -from ..json import loads - - -class JSONTag: - """Base class for defining type tags for :class:`TaggedJSONSerializer`.""" - - __slots__ = ("serializer",) - - #: The tag to mark the serialized object with. If empty, this tag is - #: only used as an intermediate step during tagging. - key: str = "" - - def __init__(self, serializer: TaggedJSONSerializer) -> None: - """Create a tagger for the given serializer.""" - self.serializer = serializer - - def check(self, value: t.Any) -> bool: - """Check if the given value should be tagged by this tag.""" - raise NotImplementedError - - def to_json(self, value: t.Any) -> t.Any: - """Convert the Python object to an object that is a valid JSON type. - The tag will be added later.""" - raise NotImplementedError - - def to_python(self, value: t.Any) -> t.Any: - """Convert the JSON representation back to the correct type. The tag - will already be removed.""" - raise NotImplementedError - - def tag(self, value: t.Any) -> dict[str, t.Any]: - """Convert the value to a valid JSON type and add the tag structure - around it.""" - return {self.key: self.to_json(value)} - - -class TagDict(JSONTag): - """Tag for 1-item dicts whose only key matches a registered tag. - - Internally, the dict key is suffixed with `__`, and the suffix is removed - when deserializing. - """ - - __slots__ = () - key = " di" - - def check(self, value: t.Any) -> bool: - return ( - isinstance(value, dict) - and len(value) == 1 - and next(iter(value)) in self.serializer.tags - ) - - def to_json(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {f"{key}__": self.serializer.tag(value[key])} - - def to_python(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {key[:-2]: value[key]} - - -class PassDict(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, dict) - - def to_json(self, value: t.Any) -> t.Any: - # JSON objects may only have string keys, so don't bother tagging the - # key here. 
- return {k: self.serializer.tag(v) for k, v in value.items()} - - tag = to_json - - -class TagTuple(JSONTag): - __slots__ = () - key = " t" - - def check(self, value: t.Any) -> bool: - return isinstance(value, tuple) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - def to_python(self, value: t.Any) -> t.Any: - return tuple(value) - - -class PassList(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, list) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - tag = to_json - - -class TagBytes(JSONTag): - __slots__ = () - key = " b" - - def check(self, value: t.Any) -> bool: - return isinstance(value, bytes) - - def to_json(self, value: t.Any) -> t.Any: - return b64encode(value).decode("ascii") - - def to_python(self, value: t.Any) -> t.Any: - return b64decode(value) - - -class TagMarkup(JSONTag): - """Serialize anything matching the :class:`~markupsafe.Markup` API by - having a ``__html__`` method to the result of that method. Always - deserializes to an instance of :class:`~markupsafe.Markup`.""" - - __slots__ = () - key = " m" - - def check(self, value: t.Any) -> bool: - return callable(getattr(value, "__html__", None)) - - def to_json(self, value: t.Any) -> t.Any: - return str(value.__html__()) - - def to_python(self, value: t.Any) -> t.Any: - return Markup(value) - - -class TagUUID(JSONTag): - __slots__ = () - key = " u" - - def check(self, value: t.Any) -> bool: - return isinstance(value, UUID) - - def to_json(self, value: t.Any) -> t.Any: - return value.hex - - def to_python(self, value: t.Any) -> t.Any: - return UUID(value) - - -class TagDateTime(JSONTag): - __slots__ = () - key = " d" - - def check(self, value: t.Any) -> bool: - return isinstance(value, datetime) - - def to_json(self, value: t.Any) -> t.Any: - return http_date(value) - - def to_python(self, value: t.Any) -> t.Any: - return parse_date(value) - - -class TaggedJSONSerializer: - """Serializer that uses a tag system to compactly represent objects that - are not JSON types. Passed as the intermediate serializer to - :class:`itsdangerous.Serializer`. - - The following extra types are supported: - - * :class:`dict` - * :class:`tuple` - * :class:`bytes` - * :class:`~markupsafe.Markup` - * :class:`~uuid.UUID` - * :class:`~datetime.datetime` - """ - - __slots__ = ("tags", "order") - - #: Tag classes to bind when creating the serializer. Other tags can be - #: added later using :meth:`~register`. - default_tags = [ - TagDict, - PassDict, - TagTuple, - PassList, - TagBytes, - TagMarkup, - TagUUID, - TagDateTime, - ] - - def __init__(self) -> None: - self.tags: dict[str, JSONTag] = {} - self.order: list[JSONTag] = [] - - for cls in self.default_tags: - self.register(cls) - - def register( - self, - tag_class: type[JSONTag], - force: bool = False, - index: int | None = None, - ) -> None: - """Register a new tag with this serializer. - - :param tag_class: tag class to register. Will be instantiated with this - serializer instance. - :param force: overwrite an existing tag. If false (default), a - :exc:`KeyError` is raised. - :param index: index to insert the new tag in the tag order. Useful when - the new tag is a special case of an existing tag. If ``None`` - (default), the tag is appended to the end of the order. - - :raise KeyError: if the tag key is already registered and ``force`` is - not true. 
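To make the ``register`` contract above concrete, a hedged sketch of a custom tag for Python sets, registered ahead of the built-ins the same way the ``OrderedDict`` example in the module docstring does (the ``' se'`` key is invented)::

    from flask.json.tag import JSONTag, TaggedJSONSerializer

    class TagSet(JSONTag):
        __slots__ = ()
        key = " se"                      # leading space mirrors the built-in tag keys

        def check(self, value):
            return isinstance(value, set)

        def to_json(self, value):
            return [self.serializer.tag(item) for item in value]

        def to_python(self, value):
            return set(value)

    serializer = TaggedJSONSerializer()
    serializer.register(TagSet, index=0)     # first in the check() order
    assert serializer.loads(serializer.dumps({1, 2, 3})) == {1, 2, 3}

    # To use it for session cookies, register on the session serializer instead:
    # app.session_interface.serializer.register(TagSet, index=0)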
- """ - tag = tag_class(self) - key = tag.key - - if key: - if not force and key in self.tags: - raise KeyError(f"Tag '{key}' is already registered.") - - self.tags[key] = tag - - if index is None: - self.order.append(tag) - else: - self.order.insert(index, tag) - - def tag(self, value: t.Any) -> t.Any: - """Convert a value to a tagged representation if necessary.""" - for tag in self.order: - if tag.check(value): - return tag.tag(value) - - return value - - def untag(self, value: dict[str, t.Any]) -> t.Any: - """Convert a tagged representation back to the original type.""" - if len(value) != 1: - return value - - key = next(iter(value)) - - if key not in self.tags: - return value - - return self.tags[key].to_python(value[key]) - - def _untag_scan(self, value: t.Any) -> t.Any: - if isinstance(value, dict): - # untag each item recursively - value = {k: self._untag_scan(v) for k, v in value.items()} - # untag the dict itself - value = self.untag(value) - elif isinstance(value, list): - # untag each item recursively - value = [self._untag_scan(item) for item in value] - - return value - - def dumps(self, value: t.Any) -> str: - """Tag the value and dump it to a compact JSON string.""" - return dumps(self.tag(value), separators=(",", ":")) - - def loads(self, value: str) -> t.Any: - """Load data from a JSON string and deserialized any tagged objects.""" - return self._untag_scan(loads(value)) diff --git a/.venv/lib/python3.12/site-packages/flask/logging.py b/.venv/lib/python3.12/site-packages/flask/logging.py deleted file mode 100644 index 0cb8f43..0000000 --- a/.venv/lib/python3.12/site-packages/flask/logging.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -import logging -import sys -import typing as t - -from werkzeug.local import LocalProxy - -from .globals import request - -if t.TYPE_CHECKING: # pragma: no cover - from .sansio.app import App - - -@LocalProxy -def wsgi_errors_stream() -> t.TextIO: - """Find the most appropriate error stream for the application. If a request - is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. - - If you configure your own :class:`logging.StreamHandler`, you may want to - use this for the stream. If you are using file or dict configuration and - can't import this directly, you can refer to it as - ``ext://flask.logging.wsgi_errors_stream``. - """ - if request: - return request.environ["wsgi.errors"] # type: ignore[no-any-return] - - return sys.stderr - - -def has_level_handler(logger: logging.Logger) -> bool: - """Check if there is a handler in the logging chain that will handle the - given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. - """ - level = logger.getEffectiveLevel() - current = logger - - while current: - if any(handler.level <= level for handler in current.handlers): - return True - - if not current.propagate: - break - - current = current.parent # type: ignore - - return False - - -#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format -#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. -default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore -default_handler.setFormatter( - logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s") -) - - -def create_logger(app: App) -> logging.Logger: - """Get the Flask app's logger and configure it if needed. - - The logger name will be the same as - :attr:`app.import_name `. 
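A hedged sketch of configuring these pieces explicitly with ``logging.config.dictConfig`` before the app exists, pointing a handler at ``wsgi_errors_stream`` through the ``ext://`` reference mentioned above (format string and level are arbitrary choices)::

    from logging.config import dictConfig
    from flask import Flask

    # Configure logging before creating the app so create_logger() finds a
    # handler at the effective level and does not attach default_handler.
    dictConfig({
        "version": 1,
        "formatters": {
            "default": {
                "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
            }
        },
        "handlers": {
            "wsgi": {
                "class": "logging.StreamHandler",
                "stream": "ext://flask.logging.wsgi_errors_stream",
                "formatter": "default",
            }
        },
        "root": {"level": "INFO", "handlers": ["wsgi"]},
    })

    app = Flask(__name__)
    app.logger.info("app configured")        # handled by the root handler above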
- - When :attr:`~flask.Flask.debug` is enabled, set the logger level to - :data:`logging.DEBUG` if it is not set. - - If there is no handler for the logger's effective level, add a - :class:`~logging.StreamHandler` for - :func:`~flask.logging.wsgi_errors_stream` with a basic format. - """ - logger = logging.getLogger(app.name) - - if app.debug and not logger.level: - logger.setLevel(logging.DEBUG) - - if not has_level_handler(logger): - logger.addHandler(default_handler) - - return logger diff --git a/.venv/lib/python3.12/site-packages/flask/py.typed b/.venv/lib/python3.12/site-packages/flask/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/README.md b/.venv/lib/python3.12/site-packages/flask/sansio/README.md deleted file mode 100644 index 623ac19..0000000 --- a/.venv/lib/python3.12/site-packages/flask/sansio/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Sansio - -This folder contains code that can be used by alternative Flask -implementations, for example Quart. The code therefore cannot do any -IO, nor be part of a likely IO path. Finally this code cannot use the -Flask globals. diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc deleted file mode 100644 index 3cf22ea..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/app.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc deleted file mode 100644 index 962fb1b..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/blueprints.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc deleted file mode 100644 index 39adeca..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask/sansio/__pycache__/scaffold.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/app.py b/.venv/lib/python3.12/site-packages/flask/sansio/app.py deleted file mode 100644 index a2592fe..0000000 --- a/.venv/lib/python3.12/site-packages/flask/sansio/app.py +++ /dev/null @@ -1,964 +0,0 @@ -from __future__ import annotations - -import logging -import os -import sys -import typing as t -from datetime import timedelta -from itertools import chain - -from werkzeug.exceptions import Aborter -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import Rule -from werkzeug.sansio.response import Response -from werkzeug.utils import cached_property -from werkzeug.utils import redirect as _wz_redirect - -from .. 
import typing as ft -from ..config import Config -from ..config import ConfigAttribute -from ..ctx import _AppCtxGlobals -from ..helpers import _split_blueprint_path -from ..helpers import get_debug_flag -from ..json.provider import DefaultJSONProvider -from ..json.provider import JSONProvider -from ..logging import create_logger -from ..templating import DispatchingJinjaLoader -from ..templating import Environment -from .scaffold import _endpoint_from_view_func -from .scaffold import find_package -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.wrappers import Response as BaseResponse - - from ..testing import FlaskClient - from ..testing import FlaskCliRunner - from .blueprints import Blueprint - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class App(Scaffold): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. 
(For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - #: The class of the object assigned to :attr:`aborter`, created by - #: :meth:`create_aborter`. That object is called by - #: :func:`flask.abort` to raise HTTP errors, and can be - #: called directly as well. - #: - #: Defaults to :class:`werkzeug.exceptions.Aborter`. - #: - #: .. versionadded:: 2.2 - aborter_class = Aborter - - #: The class that is used for the Jinja environment. - #: - #: .. versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. 
Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute[bool]("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute[t.Union[str, bytes, None]]("SECRET_KEY") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute[timedelta]( - "PERMANENT_SESSION_LIFETIME", - get_converter=_make_timedelta, # type: ignore[arg-type] - ) - - json_provider_class: type[JSONProvider] = DefaultJSONProvider - """A subclass of :class:`~flask.json.provider.JSONProvider`. An - instance is created and assigned to :attr:`app.json` when creating - the app. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses - Python's built-in :mod:`json` library. A different provider can use - a different JSON library. - - .. versionadded:: 2.2 - """ - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options: dict[str, t.Any] = {} - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: The :meth:`test_client` method creates an instance of this test - #: client class. Defaults to :class:`~flask.testing.FlaskClient`. - #: - #: .. versionadded:: 0.7 - test_client_class: type[FlaskClient] | None = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. 
versionadded:: 1.0 - test_cli_runner_class: type[FlaskCliRunner] | None = None - - default_config: dict[str, t.Any] - response_class: type[Response] - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ) -> None: - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: An instance of :attr:`aborter_class` created by - #: :meth:`make_aborter`. This is called by :func:`flask.abort` - #: to raise HTTP errors, and can be called directly as well. - #: - #: .. versionadded:: 2.2 - #: Moved from ``flask.abort``, which calls this object. - self.aborter = self.make_aborter() - - self.json: JSONProvider = self.json_provider_class(self) - """Provides access to JSON methods. Functions in ``flask.json`` - will call methods on this provider when the application context - is active. Used for handling JSON requests and responses. - - An instance of :attr:`json_provider_class`. Can be customized by - changing that attribute on a subclass, or by assigning to this - attribute afterwards. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, - uses Python's built-in :mod:`json` library. A different provider - can use a different JSON library. - - .. versionadded:: 2.2 - """ - - #: A list of functions that are called by - #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function is called - #: with ``error``, ``endpoint`` and ``values``. If a function - #: returns ``None`` or raises a ``BuildError``, it is skipped. - #: Otherwise, its return value is returned by ``url_for``. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers: list[ - t.Callable[[Exception, str, dict[str, t.Any]], str] - ] = [] - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs: list[ft.TeardownCallable] = [] - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors: list[ft.ShellContextProcessorCallable] = [] - - #: Maps registered blueprint names to blueprint objects. The - #: dict retains the order the blueprints were registered in. 
- #: Blueprints can be registered multiple times, this dict does - #: not track how often they were attached. - #: - #: .. versionadded:: 0.7 - self.blueprints: dict[str, Blueprint] = {} - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions: dict[str, t.Any] = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class(host_matching=host_matching) - - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_first_request: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called" - " on the application. It has already handled its first" - " request, any changes will not be applied" - " consistently.\n" - "Make sure all imports, decorators, functions, etc." - " needed to set up the application are done before" - " running it." - ) - - @cached_property - def name(self) -> str: - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn: str | None = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @cached_property - def logger(self) -> logging.Logger: - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. versionadded:: 0.3 - """ - return create_logger(self) - - @cached_property - def jinja_env(self) -> Environment: - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. 
Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - def create_jinja_environment(self) -> Environment: - raise NotImplementedError() - - def make_config(self, instance_relative: bool = False) -> Config: - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def make_aborter(self) -> Aborter: - """Create the object to assign to :attr:`aborter`. That object - is called by :func:`flask.abort` to raise HTTP errors, and can - be called directly as well. - - By default, this creates an instance of :attr:`aborter_class`, - which defaults to :class:`werkzeug.exceptions.Aborter`. - - .. versionadded:: 2.2 - """ - return self.aborter_class() - - def auto_find_instance_path(self) -> str: - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, "instance") - return os.path.join(prefix, "var", f"{self.name}-instance") - - def create_global_jinja_loader(self) -> DispatchingJinjaLoader: - """Creates the loader for the Jinja environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def select_jinja_autoescape(self, filename: str) -> bool: - """Returns ``True`` if autoescaping should be active for the given - template name. If no template name is given, returns `True`. - - .. versionchanged:: 2.2 - Autoescaping is now enabled by default for ``.svg`` files. - - .. versionadded:: 0.5 - """ - if filename is None: - return True - return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg")) - - @property - def debug(self) -> bool: - """Whether debug mode is enabled. When using ``flask run`` to start the - development server, an interactive debugger will be shown for unhandled - exceptions, and the server will be reloaded when code changes. This maps to the - :data:`DEBUG` config key. It may not behave as expected if set late. - - **Do not enable debug mode when deploying in production.** - - Default: ``False`` - """ - return self.config["DEBUG"] # type: ignore[no-any-return] - - @debug.setter - def debug(self, value: bool) -> None: - self.config["DEBUG"] = value - - if self.config["TEMPLATES_AUTO_RELOAD"] is None: - self.jinja_env.auto_reload = value - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on the application. Keyword - arguments passed to this method will override the defaults set on the - blueprint. 
- - Calls the blueprint's :meth:`~flask.Blueprint.register` method after - recording the blueprint in the application's :attr:`blueprints`. - - :param blueprint: The blueprint to register. - :param url_prefix: Blueprint routes will be prefixed with this. - :param subdomain: Blueprint routes will match on this subdomain. - :param url_defaults: Blueprint routes will use these default values for - view arguments. - :param options: Additional keyword arguments are passed to - :class:`~flask.blueprints.BlueprintSetupState`. They can be - accessed in :meth:`~flask.Blueprint.record` callbacks. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 0.7 - """ - blueprint.register(self, options) - - def iter_blueprints(self) -> t.ValuesView[Blueprint]: - """Iterates over all blueprints by the order they were registered. - - .. versionadded:: 0.11 - """ - return self.blueprints.values() - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - options["endpoint"] = endpoint - methods = options.pop("methods", None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only ``GET`` as default. - if methods is None: - methods = getattr(view_func, "methods", None) or ("GET",) - if isinstance(methods, str): - raise TypeError( - "Allowed methods must be a list of strings, for" - ' example: @app.route(..., methods=["POST"])' - ) - methods = {item.upper() for item in methods} - - # Methods that should always be added - required_methods: set[str] = set(getattr(view_func, "required_methods", ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. - if provide_automatic_options is None: - provide_automatic_options = getattr( - view_func, "provide_automatic_options", None - ) - - if provide_automatic_options is None: - if "OPTIONS" not in methods and self.config["PROVIDE_AUTOMATIC_OPTIONS"]: - provide_automatic_options = True - required_methods.add("OPTIONS") - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - rule_obj = self.url_rule_class(rule, methods=methods, **options) - rule_obj.provide_automatic_options = provide_automatic_options # type: ignore[attr-defined] - - self.url_map.add(rule_obj) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError( - "View function mapping is overwriting an existing" - f" endpoint function: {endpoint}" - ) - self.view_functions[endpoint] = view_func - - @setupmethod - def template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. 
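For orientation, a small self-contained sketch of the ``register_blueprint`` flow documented earlier in this removed module; the blueprint and route names are illustrative:

    from flask import Blueprint, Flask

    admin = Blueprint("admin", __name__, url_prefix="/admin")

    @admin.route("/ping")
    def ping():
        return "pong"

    app = Flask(__name__)
    # Keyword arguments here override the defaults set on the blueprint,
    # e.g. the url_prefix passed to Blueprint() above.
    app.register_blueprint(admin, url_prefix="/backoffice")

    # The endpoint is prefixed with the blueprint name:
    # url_for("admin.ping") -> "/backoffice/ping"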
- """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def teardown_appcontext(self, f: T_teardown) -> T_teardown: - """Registers a function to be called when the application - context is popped. The application context is typically popped - after the request context for each request, at the end of CLI - commands, or after a manually pushed context ends. - - .. code-block:: python - - with app.app_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the app context is - made inactive. Since a request context typically also manages an - application context it would also be called when you pop a - request context. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. 
- - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def shell_context_processor( - self, f: T_shell_context_processor - ) -> T_shell_context_processor: - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - def _find_error_handler( - self, e: Exception, blueprints: list[str] - ) -> ft.ErrorHandlerCallable | None: - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. - """ - exc_class, code = self._get_exc_class_and_code(type(e)) - names = (*blueprints, None) - - for c in (code, None) if code is not None else (None,): - for name in names: - handler_map = self.error_handler_spec[name][c] - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - return None - - def trap_http_exception(self, e: Exception) -> bool: - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def should_ignore_error(self, error: BaseException | None) -> bool: - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def redirect(self, location: str, code: int = 302) -> BaseResponse: - """Create a redirect response object. - - This is called by :func:`flask.redirect`, and can be called - directly as well. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - - .. versionadded:: 2.2 - Moved from ``flask.redirect``, which calls this method. - """ - return _wz_redirect( - location, - code=code, - Response=self.response_class, # type: ignore[arg-type] - ) - - def inject_url_defaults(self, endpoint: str, values: dict[str, t.Any]) -> None: - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. 
versionadded:: 0.7 - """ - names: t.Iterable[str | None] = (None,) - - # url_for may be called outside a request context, parse the - # passed endpoint instead of using request.blueprints. - if "." in endpoint: - names = chain( - names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) - ) - - for name in names: - if name in self.url_default_functions: - for func in self.url_default_functions[name]: - func(endpoint, values) - - def handle_url_build_error( - self, error: BuildError, endpoint: str, values: dict[str, t.Any] - ) -> str: - """Called by :meth:`.url_for` if a - :exc:`~werkzeug.routing.BuildError` was raised. If this returns - a value, it will be returned by ``url_for``, otherwise the error - will be re-raised. - - Each function in :attr:`url_build_error_handlers` is called with - ``error``, ``endpoint`` and ``values``. If a function returns - ``None`` or raises a ``BuildError``, it is skipped. Otherwise, - its return value is returned by ``url_for``. - - :param error: The active ``BuildError`` being handled. - :param endpoint: The endpoint being built. - :param values: The keyword arguments passed to ``url_for``. - """ - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - except BuildError as e: - # make error available outside except block - error = e - else: - if rv is not None: - return rv - - # Re-raise if called with an active exception, otherwise raise - # the passed in exception. - if error is sys.exc_info()[1]: - raise - - raise error diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/blueprints.py b/.venv/lib/python3.12/site-packages/flask/sansio/blueprints.py deleted file mode 100644 index 4f912cc..0000000 --- a/.venv/lib/python3.12/site-packages/flask/sansio/blueprints.py +++ /dev/null @@ -1,632 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from .. import typing as ft -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from .app import App - -DeferredSetupFunction = t.Callable[["BlueprintSetupState"], None] -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) - - -class BlueprintSetupState: - """Temporary holder object for registering a blueprint with the - application. An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. 
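A short sketch of the ``url_build_error_handlers`` hook described a little earlier in the removed app module; the endpoint name and fallback URL are illustrative:

    from flask import Flask

    app = Flask(__name__)

    def fall_back_to_search(error, endpoint, values):
        # Called by handle_url_build_error(); returning a string makes
        # url_for() return it instead of re-raising the BuildError.
        if endpoint == "legacy.search":
            return "/search"
        return None  # returning None skips this handler

    app.url_build_error_handlers.append(fall_back_to_search)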
- """ - - def __init__( - self, - blueprint: Blueprint, - app: App, - options: t.Any, - first_registration: bool, - ) -> None: - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - self.name = self.options.get("name", blueprint.name) - self.name_prefix = self.options.get("name_prefix", "") - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - **options: t.Any, - ) -> None: - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - - self.app.add_url_rule( - rule, - f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), - view_func, - defaults=defaults, - **options, - ) - - -class Blueprint(Scaffold): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :doc:`/blueprints` for more information. - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. - :param static_folder: A folder with static files that should be - served by the blueprint's static route. The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. 
- :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically set - this based on ``import_name``. In certain situations this - automatic detection can fail, so the path can be specified - manually instead. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. - The ``cli_group`` parameter controls the name of the group under - the ``flask`` command. - - .. versionadded:: 0.7 - """ - - _got_registered_once = False - - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore[assignment] - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if not name: - raise ValueError("'name' may not be empty.") - - if "." in name: - raise ValueError("'name' may not contain a dot '.' character.") - - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.deferred_functions: list[DeferredSetupFunction] = [] - - if url_defaults is None: - url_defaults = {} - - self.url_values_defaults = url_defaults - self.cli_group = cli_group - self._blueprints: list[tuple[Blueprint, dict[str, t.Any]]] = [] - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_registered_once: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called on the blueprint" - f" '{self.name}'. It has already been registered at least once, any" - " changes will not be applied consistently.\n" - "Make sure all imports, decorators, functions, etc. needed to set up" - " the blueprint are done before registering it." - ) - - @setupmethod - def record(self, func: DeferredSetupFunction) -> None: - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - self.deferred_functions.append(func) - - @setupmethod - def record_once(self, func: DeferredSetupFunction) -> None: - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. 
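A short sketch of the deferred-setup pattern that ``record`` and ``record_once`` above enable; the blueprint name and config key are illustrative:

    from flask import Blueprint, Flask

    bp = Blueprint("payments", __name__)

    # Runs only on the first registration of this blueprint, receiving
    # the BlueprintSetupState as its argument.
    bp.record_once(lambda state: state.app.config.setdefault("PAYMENTS_CURRENCY", "EUR"))

    app = Flask(__name__)
    app.register_blueprint(bp)
    assert app.config["PAYMENTS_CURRENCY"] == "EUR"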
- """ - - def wrapper(state: BlueprintSetupState) -> None: - if state.first_registration: - func(state) - - self.record(update_wrapper(wrapper, func)) - - def make_setup_state( - self, app: App, options: dict[str, t.Any], first_registration: bool = False - ) -> BlueprintSetupState: - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on this blueprint. Keyword - arguments passed to this method will override the defaults set - on the blueprint. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 2.0 - """ - if blueprint is self: - raise ValueError("Cannot register a blueprint on itself") - self._blueprints.append((blueprint, options)) - - def register(self, app: App, options: dict[str, t.Any]) -> None: - """Called by :meth:`Flask.register_blueprint` to register all - views and callbacks registered on the blueprint with the - application. Creates a :class:`.BlueprintSetupState` and calls - each :meth:`record` callback with it. - - :param app: The application this blueprint is being registered - with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - - .. versionchanged:: 2.3 - Nested blueprints now correctly apply subdomains. - - .. versionchanged:: 2.1 - Registering the same blueprint with the same name multiple - times is an error. - - .. versionchanged:: 2.0.1 - Nested blueprints are registered with their dotted name. - This allows different blueprints with the same name to be - nested at different locations. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - """ - name_prefix = options.get("name_prefix", "") - self_name = options.get("name", self.name) - name = f"{name_prefix}.{self_name}".lstrip(".") - - if name in app.blueprints: - bp_desc = "this" if app.blueprints[name] is self else "a different" - existing_at = f" '{name}'" if self_name != name else "" - - raise ValueError( - f"The name '{self_name}' is already registered for" - f" {bp_desc} blueprint{existing_at}. Use 'name=' to" - f" provide a unique name." - ) - - first_bp_registration = not any(bp is self for bp in app.blueprints.values()) - first_name_registration = name not in app.blueprints - - app.blueprints[name] = self - self._got_registered_once = True - state = self.make_setup_state(app, options, first_bp_registration) - - if self.has_static_folder: - state.add_url_rule( - f"{self.static_url_path}/", - view_func=self.send_static_file, # type: ignore[attr-defined] - endpoint="static", - ) - - # Merge blueprint data into parent. 
- if first_bp_registration or first_name_registration: - self._merge_blueprint_funcs(app, name) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if self.cli.commands: - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - for blueprint, bp_options in self._blueprints: - bp_options = bp_options.copy() - bp_url_prefix = bp_options.get("url_prefix") - bp_subdomain = bp_options.get("subdomain") - - if bp_subdomain is None: - bp_subdomain = blueprint.subdomain - - if state.subdomain is not None and bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain + "." + state.subdomain - elif bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain - elif state.subdomain is not None: - bp_options["subdomain"] = state.subdomain - - if bp_url_prefix is None: - bp_url_prefix = blueprint.url_prefix - - if state.url_prefix is not None and bp_url_prefix is not None: - bp_options["url_prefix"] = ( - state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") - ) - elif bp_url_prefix is not None: - bp_options["url_prefix"] = bp_url_prefix - elif state.url_prefix is not None: - bp_options["url_prefix"] = state.url_prefix - - bp_options["name_prefix"] = name - blueprint.register(app, bp_options) - - def _merge_blueprint_funcs(self, app: App, name: str) -> None: - def extend( - bp_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - parent_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - ) -> None: - for key, values in bp_dict.items(): - key = name if key is None else f"{name}.{key}" - parent_dict[key].extend(values) - - for key, value in self.error_handler_spec.items(): - key = name if key is None else f"{name}.{key}" - value = defaultdict( - dict, - { - code: {exc_class: func for exc_class, func in code_values.items()} - for code, code_values in value.items() - }, - ) - app.error_handler_spec[key] = value - - for endpoint, func in self.view_functions.items(): - app.view_functions[endpoint] = func - - extend(self.before_request_funcs, app.before_request_funcs) - extend(self.after_request_funcs, app.after_request_funcs) - extend( - self.teardown_request_funcs, - app.teardown_request_funcs, - ) - extend(self.url_default_functions, app.url_default_functions) - extend(self.url_value_preprocessors, app.url_value_preprocessors) - extend(self.template_context_processors, app.template_context_processors) - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for - full documentation. - - The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, - used with :func:`url_for`, is prefixed with the blueprint's name. - """ - if endpoint and "." in endpoint: - raise ValueError("'endpoint' may not contain a dot '.' character.") - - if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: - raise ValueError("'view_func' name may not contain a dot '.' 
character.") - - self.record( - lambda s: s.add_url_rule( - rule, - endpoint, - view_func, - provide_automatic_options=provide_automatic_options, - **options, - ) - ) - - @setupmethod - def app_template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """Register a template filter, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_app_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a template filter, available in any template rendered by the - application. Works like the :meth:`app_template_filter` decorator. Equivalent to - :meth:`.Flask.add_template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """Register a template test, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_app_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a template test, available in any template rendered by the - application. Works like the :meth:`app_template_test` decorator. Equivalent to - :meth:`.Flask.add_template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """Register a template global, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_app_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a template global, available in any template rendered by the - application. Works like the :meth:`app_template_global` decorator. Equivalent to - :meth:`.Flask.add_template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. 
- """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def before_app_request(self, f: T_before_request) -> T_before_request: - """Like :meth:`before_request`, but before every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.before_request`. - """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def after_app_request(self, f: T_after_request) -> T_after_request: - """Like :meth:`after_request`, but after every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.after_request`. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def teardown_app_request(self, f: T_teardown) -> T_teardown: - """Like :meth:`teardown_request`, but after every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_context_processor( - self, f: T_template_context_processor - ) -> T_template_context_processor: - """Like :meth:`context_processor`, but for templates rendered by every view, not - only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_errorhandler( - self, code: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Like :meth:`errorhandler`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.errorhandler`. - """ - - def decorator(f: T_error_handler) -> T_error_handler: - def from_blueprint(state: BlueprintSetupState) -> None: - state.app.errorhandler(code)(f) - - self.record_once(from_blueprint) - return f - - return decorator - - @setupmethod - def app_url_value_preprocessor( - self, f: T_url_value_preprocessor - ) -> T_url_value_preprocessor: - """Like :meth:`url_value_preprocessor`, but for every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Like :meth:`url_defaults`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.url_defaults`. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f diff --git a/.venv/lib/python3.12/site-packages/flask/sansio/scaffold.py b/.venv/lib/python3.12/site-packages/flask/sansio/scaffold.py deleted file mode 100644 index 0e96f15..0000000 --- a/.venv/lib/python3.12/site-packages/flask/sansio/scaffold.py +++ /dev/null @@ -1,792 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import pathlib -import sys -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from jinja2 import BaseLoader -from jinja2 import FileSystemLoader -from werkzeug.exceptions import default_exceptions -from werkzeug.exceptions import HTTPException -from werkzeug.utils import cached_property - -from .. 
import typing as ft -from ..helpers import get_root_path -from ..templating import _default_template_ctx_processor - -if t.TYPE_CHECKING: # pragma: no cover - from click import Group - -# a singleton sentinel value for parameter defaults -_sentinel = object() - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) -T_route = t.TypeVar("T_route", bound=ft.RouteCallable) - - -def setupmethod(f: F) -> F: - f_name = f.__name__ - - def wrapper_func(self: Scaffold, *args: t.Any, **kwargs: t.Any) -> t.Any: - self._check_setup_finished(f_name) - return f(self, *args, **kwargs) - - return t.cast(F, update_wrapper(wrapper_func, f)) - - -class Scaffold: - """Common behavior shared between :class:`~flask.Flask` and - :class:`~flask.blueprints.Blueprint`. - - :param import_name: The import name of the module where this object - is defined. Usually :attr:`__name__` should be used. - :param static_folder: Path to a folder of static files to serve. - If this is set, a static route will be added. - :param static_url_path: URL prefix for the static route. - :param template_folder: Path to a folder containing template files. - for rendering. If this is set, a Jinja loader will be added. - :param root_path: The path that static, template, and resource files - are relative to. Typically not set, it is discovered based on - the ``import_name``. - - .. versionadded:: 2.0 - """ - - cli: Group - name: str - _static_folder: str | None = None - _static_url_path: str | None = None - - def __init__( - self, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - root_path: str | None = None, - ): - #: The name of the package or module that this object belongs - #: to. Do not change this once it is set by the constructor. - self.import_name = import_name - - self.static_folder = static_folder - self.static_url_path = static_url_path - - #: The path to the templates folder, relative to - #: :attr:`root_path`, to add to the template loader. ``None`` if - #: templates should not be added. - self.template_folder = template_folder - - if root_path is None: - root_path = get_root_path(self.import_name) - - #: Absolute path to the package on the filesystem. Used to look - #: up resources contained in the package. - self.root_path = root_path - - #: A dictionary mapping endpoint names to view functions. - #: - #: To register a view function, use the :meth:`route` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.view_functions: dict[str, ft.RouteCallable] = {} - - #: A data structure of registered error handlers, in the format - #: ``{scope: {code: {class: handler}}}``. The ``scope`` key is - #: the name of a blueprint the handlers are active for, or - #: ``None`` for all requests. 
The ``code`` key is the HTTP - #: status code for ``HTTPException``, or ``None`` for - #: other exceptions. The innermost dictionary maps exception - #: classes to handler functions. - #: - #: To register an error handler, use the :meth:`errorhandler` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.error_handler_spec: dict[ - ft.AppOrBlueprintKey, - dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]], - ] = defaultdict(lambda: defaultdict(dict)) - - #: A data structure of functions to call at the beginning of - #: each request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`before_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.before_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`after_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.after_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.AfterRequestCallable[t.Any]] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request even if an exception is raised, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`teardown_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.teardown_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.TeardownCallable] - ] = defaultdict(list) - - #: A data structure of functions to call to pass extra context - #: values when rendering templates, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`context_processor` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.template_context_processors: dict[ - ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable] - ] = defaultdict(list, {None: [_default_template_ctx_processor]}) - - #: A data structure of functions to call to modify the keyword - #: arguments passed to the view function, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the - #: :meth:`url_value_preprocessor` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. 
- self.url_value_preprocessors: dict[ - ft.AppOrBlueprintKey, - list[ft.URLValuePreprocessorCallable], - ] = defaultdict(list) - - #: A data structure of functions to call to modify the keyword - #: arguments when generating URLs, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`url_defaults` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.url_default_functions: dict[ - ft.AppOrBlueprintKey, list[ft.URLDefaultCallable] - ] = defaultdict(list) - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.name!r}>" - - def _check_setup_finished(self, f_name: str) -> None: - raise NotImplementedError - - @property - def static_folder(self) -> str | None: - """The absolute path to the configured static folder. ``None`` - if no static folder is set. - """ - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - else: - return None - - @static_folder.setter - def static_folder(self, value: str | os.PathLike[str] | None) -> None: - if value is not None: - value = os.fspath(value).rstrip(r"\/") - - self._static_folder = value - - @property - def has_static_folder(self) -> bool: - """``True`` if :attr:`static_folder` is set. - - .. versionadded:: 0.5 - """ - return self.static_folder is not None - - @property - def static_url_path(self) -> str | None: - """The URL prefix that the static route will be accessible from. - - If it was not configured during init, it is derived from - :attr:`static_folder`. - """ - if self._static_url_path is not None: - return self._static_url_path - - if self.static_folder is not None: - basename = os.path.basename(self.static_folder) - return f"/{basename}".rstrip("/") - - return None - - @static_url_path.setter - def static_url_path(self, value: str | None) -> None: - if value is not None: - value = value.rstrip("/") - - self._static_url_path = value - - @cached_property - def jinja_loader(self) -> BaseLoader | None: - """The Jinja loader for this object's templates. By default this - is a class :class:`jinja2.loaders.FileSystemLoader` to - :attr:`template_folder` if it is set. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) - else: - return None - - def _method_route( - self, - method: str, - rule: str, - options: dict[str, t.Any], - ) -> t.Callable[[T_route], T_route]: - if "methods" in options: - raise TypeError("Use the 'route' decorator to use the 'methods' argument.") - - return self.route(rule, methods=[method], **options) - - @setupmethod - def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["GET"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("GET", rule, options) - - @setupmethod - def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["POST"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("POST", rule, options) - - @setupmethod - def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PUT"]``. - - .. 
versionadded:: 2.0 - """ - return self._method_route("PUT", rule, options) - - @setupmethod - def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["DELETE"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("DELETE", rule, options) - - @setupmethod - def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PATCH"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("PATCH", rule, options) - - @setupmethod - def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Decorate a view function to register it with the given URL - rule and options. Calls :meth:`add_url_rule`, which has more - details about the implementation. - - .. code-block:: python - - @app.route("/") - def index(): - return "Hello, World!" - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and - ``OPTIONS`` are added automatically. - - :param rule: The URL rule string. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. - """ - - def decorator(f: T_route) -> T_route: - endpoint = options.pop("endpoint", None) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a rule for routing incoming requests and building - URLs. The :meth:`route` decorator is a shortcut to call this - with the ``view_func`` argument. These are equivalent: - - .. code-block:: python - - @app.route("/") - def index(): - ... - - .. code-block:: python - - def index(): - ... - - app.add_url_rule("/", view_func=index) - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. An error - will be raised if a function has already been registered for the - endpoint. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is - always added automatically, and ``OPTIONS`` is added - automatically by default. - - ``view_func`` does not necessarily need to be passed, but if the - rule should participate in routing an endpoint name must be - associated with a view function at some point with the - :meth:`endpoint` decorator. - - .. code-block:: python - - app.add_url_rule("/", endpoint="index") - - @app.endpoint("index") - def index(): - ... - - If ``view_func`` has a ``required_methods`` attribute, those - methods are added to the passed and automatic methods. If it - has a ``provide_automatic_methods`` attribute, it is used as the - default if the parameter is not passed. - - :param rule: The URL rule string. - :param endpoint: The endpoint name to associate with the rule - and view function. Used when routing and building URLs. - Defaults to ``view_func.__name__``. - :param view_func: The view function to associate with the - endpoint name. - :param provide_automatic_options: Add the ``OPTIONS`` method and - respond to ``OPTIONS`` requests automatically. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. 
- """ - raise NotImplementedError - - @setupmethod - def endpoint(self, endpoint: str) -> t.Callable[[F], F]: - """Decorate a view function to register it for the given - endpoint. Used if a rule is added without a ``view_func`` with - :meth:`add_url_rule`. - - .. code-block:: python - - app.add_url_rule("/ex", endpoint="example") - - @app.endpoint("example") - def example(): - ... - - :param endpoint: The endpoint name to associate with the view - function. - """ - - def decorator(f: F) -> F: - self.view_functions[endpoint] = f - return f - - return decorator - - @setupmethod - def before_request(self, f: T_before_request) -> T_before_request: - """Register a function to run before each request. - - For example, this can be used to open a database connection, or - to load the logged in user from the session. - - .. code-block:: python - - @app.before_request - def load_user(): - if "user_id" in session: - g.user = db.session.get(session["user_id"]) - - The function will be called without any arguments. If it returns - a non-``None`` value, the value is handled as if it was the - return value from the view, and further request handling is - stopped. - - This is available on both app and blueprint objects. When used on an app, this - executes before every request. When used on a blueprint, this executes before - every request that the blueprint handles. To register with a blueprint and - execute before every request, use :meth:`.Blueprint.before_app_request`. - """ - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def after_request(self, f: T_after_request) -> T_after_request: - """Register a function to run after each request to this object. - - The function is called with the response object, and must return - a response object. This allows the functions to modify or - replace the response before it is sent. - - If a function raises an exception, any remaining - ``after_request`` functions will not be called. Therefore, this - should not be used for actions that must execute, such as to - close resources. Use :meth:`teardown_request` for that. - - This is available on both app and blueprint objects. When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.after_app_request`. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f: T_teardown) -> T_teardown: - """Register a function to be called when the request context is - popped. Typically this happens at the end of each request, but - contexts may be pushed manually as well during testing. - - .. code-block:: python - - with app.test_request_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the request context is - made inactive. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - This is available on both app and blueprint objects. 
When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.teardown_app_request`. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def context_processor( - self, - f: T_template_context_processor, - ) -> T_template_context_processor: - """Registers a template context processor function. These functions run before - rendering a template. The keys of the returned dict are added as variables - available in the template. - - This is available on both app and blueprint objects. When used on an app, this - is called for every rendered template. When used on a blueprint, this is called - for templates rendered from the blueprint's views. To register with a blueprint - and affect every template, use :meth:`.Blueprint.app_context_processor`. - """ - self.template_context_processors[None].append(f) - return f - - @setupmethod - def url_value_preprocessor( - self, - f: T_url_value_preprocessor, - ) -> T_url_value_preprocessor: - """Register a URL value preprocessor function for all view - functions in the application. These functions will be called before the - :meth:`before_request` functions. - - The function can modify the values captured from the matched url before - they are passed to the view. For example, this can be used to pop a - common language code value and place it in ``g`` rather than pass it to - every view. - - The function is passed the endpoint name and values dict. The return - value is ignored. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_value_preprocessor`. - """ - self.url_value_preprocessors[None].append(f) - return f - - @setupmethod - def url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_defaults`. - """ - self.url_default_functions[None].append(f) - return f - - @setupmethod - def errorhandler( - self, code_or_exception: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Register a function to handle errors by code or exception class. - - A decorator that is used to register a function given an - error code. Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - This is available on both app and blueprint objects. When used on an app, this - can handle errors from every request. When used on a blueprint, this can handle - errors from requests that the blueprint handles. To register with a blueprint - and affect every request, use :meth:`.Blueprint.app_errorhandler`. - - .. 
versionadded:: 0.7 - Use :meth:`register_error_handler` instead of modifying - :attr:`error_handler_spec` directly, for application wide error - handlers. - - .. versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code_or_exception: the code as integer for the handler, or - an arbitrary exception - """ - - def decorator(f: T_error_handler) -> T_error_handler: - self.register_error_handler(code_or_exception, f) - return f - - return decorator - - @setupmethod - def register_error_handler( - self, - code_or_exception: type[Exception] | int, - f: ft.ErrorHandlerCallable, - ) -> None: - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - exc_class, code = self._get_exc_class_and_code(code_or_exception) - self.error_handler_spec[None][code][exc_class] = f - - @staticmethod - def _get_exc_class_and_code( - exc_class_or_code: type[Exception] | int, - ) -> tuple[type[Exception], int | None]: - """Get the exception class being handled. For HTTP status codes - or ``HTTPException`` subclasses, return both the exception and - status code. - - :param exc_class_or_code: Any exception class, or an HTTP status - code as an integer. - """ - exc_class: type[Exception] - - if isinstance(exc_class_or_code, int): - try: - exc_class = default_exceptions[exc_class_or_code] - except KeyError: - raise ValueError( - f"'{exc_class_or_code}' is not a recognized HTTP" - " error code. Use a subclass of HTTPException with" - " that code instead." - ) from None - else: - exc_class = exc_class_or_code - - if isinstance(exc_class, Exception): - raise TypeError( - f"{exc_class!r} is an instance, not a class. Handlers" - " can only be registered for Exception classes or HTTP" - " error codes." - ) - - if not issubclass(exc_class, Exception): - raise ValueError( - f"'{exc_class.__name__}' is not a subclass of Exception." - " Handlers can only be registered for Exception classes" - " or HTTP error codes." - ) - - if issubclass(exc_class, HTTPException): - return exc_class, exc_class.code - else: - return exc_class, None - - -def _endpoint_from_view_func(view_func: ft.RouteCallable) -> str: - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, "expected view func if endpoint is not provided." - return view_func.__name__ - - -def _find_package_path(import_name: str) -> str: - """Find the path that contains the package or module.""" - root_mod_name, _, _ = import_name.partition(".") - - try: - root_spec = importlib.util.find_spec(root_mod_name) - - if root_spec is None: - raise ValueError("not found") - except (ImportError, ValueError): - # ImportError: the machinery told us it does not exist - # ValueError: - # - the module name was invalid - # - the module name is __main__ - # - we raised `ValueError` due to `root_spec` being `None` - return os.getcwd() - - if root_spec.submodule_search_locations: - if root_spec.origin is None or root_spec.origin == "namespace": - # namespace package - package_spec = importlib.util.find_spec(import_name) - - if package_spec is not None and package_spec.submodule_search_locations: - # Pick the path in the namespace that contains the submodule. 
- package_path = pathlib.Path( - os.path.commonpath(package_spec.submodule_search_locations) - ) - search_location = next( - location - for location in root_spec.submodule_search_locations - if package_path.is_relative_to(location) - ) - else: - # Pick the first path. - search_location = root_spec.submodule_search_locations[0] - - return os.path.dirname(search_location) - else: - # package with __init__.py - return os.path.dirname(os.path.dirname(root_spec.origin)) - else: - # module - return os.path.dirname(root_spec.origin) # type: ignore[type-var, return-value] - - -def find_package(import_name: str) -> tuple[str | None, str]: - """Find the prefix that a package is installed under, and the path - that it would be imported from. - - The prefix is the directory containing the standard directory - hierarchy (lib, bin, etc.). If the package is not installed to the - system (:attr:`sys.prefix`) or a virtualenv (``site-packages``), - ``None`` is returned. - - The path is the entry in :attr:`sys.path` that contains the package - for import. If the package is not installed, it's assumed that the - package was imported from the current working directory. - """ - package_path = _find_package_path(import_name) - py_prefix = os.path.abspath(sys.prefix) - - # installed to the system - if pathlib.PurePath(package_path).is_relative_to(py_prefix): - return py_prefix, package_path - - site_parent, site_folder = os.path.split(package_path) - - # installed to a virtualenv - if site_folder.lower() == "site-packages": - parent, folder = os.path.split(site_parent) - - # Windows (prefix/lib/site-packages) - if folder.lower() == "lib": - return parent, package_path - - # Unix (prefix/lib/pythonX.Y/site-packages) - if os.path.basename(parent).lower() == "lib": - return os.path.dirname(parent), package_path - - # something else (prefix/site-packages) - return site_parent, package_path - - # not installed - return None, package_path diff --git a/.venv/lib/python3.12/site-packages/flask/sessions.py b/.venv/lib/python3.12/site-packages/flask/sessions.py deleted file mode 100644 index 0a357d9..0000000 --- a/.venv/lib/python3.12/site-packages/flask/sessions.py +++ /dev/null @@ -1,399 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import hashlib -import typing as t -from collections.abc import MutableMapping -from datetime import datetime -from datetime import timezone - -from itsdangerous import BadSignature -from itsdangerous import URLSafeTimedSerializer -from werkzeug.datastructures import CallbackDict - -from .json.tag import TaggedJSONSerializer - -if t.TYPE_CHECKING: # pragma: no cover - import typing_extensions as te - - from .app import Flask - from .wrappers import Request - from .wrappers import Response - - -class SessionMixin(MutableMapping[str, t.Any]): - """Expands a basic dictionary with session attributes.""" - - @property - def permanent(self) -> bool: - """This reflects the ``'_permanent'`` key in the dict.""" - return self.get("_permanent", False) - - @permanent.setter - def permanent(self, value: bool) -> None: - self["_permanent"] = bool(value) - - #: Some implementations can detect whether a session is newly - #: created, but that is not guaranteed. Use with caution. The mixin - # default is hard-coded ``False``. - new = False - - #: Some implementations can detect changes to the session and set - #: this when that happens. The mixin default is hard coded to - #: ``True``. 
- modified = True - - #: Some implementations can detect when session data is read or - #: written and set this when that happens. The mixin default is hard - #: coded to ``True``. - accessed = True - - -class SecureCookieSession(CallbackDict[str, t.Any], SessionMixin): - """Base class for sessions based on signed cookies. - - This session backend will set the :attr:`modified` and - :attr:`accessed` attributes. It cannot reliably track whether a - session is new (vs. empty), so :attr:`new` remains hard coded to - ``False``. - """ - - #: When data is changed, this is set to ``True``. Only the session - #: dictionary itself is tracked; if the session contains mutable - #: data (for example a nested dict) then this must be set to - #: ``True`` manually when modifying that data. The session cookie - #: will only be written to the response if this is ``True``. - modified = False - - #: When data is read or written, this is set to ``True``. Used by - # :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` - #: header, which allows caching proxies to cache different pages for - #: different users. - accessed = False - - def __init__( - self, - initial: c.Mapping[str, t.Any] | c.Iterable[tuple[str, t.Any]] | None = None, - ) -> None: - def on_update(self: te.Self) -> None: - self.modified = True - self.accessed = True - - super().__init__(initial, on_update) - - def __getitem__(self, key: str) -> t.Any: - self.accessed = True - return super().__getitem__(key) - - def get(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().get(key, default) - - def setdefault(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().setdefault(key, default) - - -class NullSession(SecureCookieSession): - """Class used to generate nicer error messages if sessions are not - available. Will still allow read-only access to the empty session - but fail on setting. - """ - - def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - raise RuntimeError( - "The session is unavailable because no secret " - "key was set. Set the secret_key on the " - "application to something unique and secret." - ) - - __setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # noqa: B950 - del _fail - - -class SessionInterface: - """The basic interface you have to implement in order to replace the - default session interface which uses werkzeug's securecookie - implementation. The only methods you have to implement are - :meth:`open_session` and :meth:`save_session`, the others have - useful defaults which you don't need to change. - - The session object returned by the :meth:`open_session` method has to - provide a dictionary like interface plus the properties and methods - from the :class:`SessionMixin`. We recommend just subclassing a dict - and adding that mixin:: - - class Session(dict, SessionMixin): - pass - - If :meth:`open_session` returns ``None`` Flask will call into - :meth:`make_null_session` to create a session that acts as replacement - if the session support cannot work because some requirement is not - fulfilled. The default :class:`NullSession` class that is created - will complain that the secret key was not set. - - To replace the session interface on an application all you have to do - is to assign :attr:`flask.Flask.session_interface`:: - - app = Flask(__name__) - app.session_interface = MySessionInterface() - - Multiple requests with the same session may be sent and handled - concurrently. 
When implementing a new session interface, consider - whether reads or writes to the backing store must be synchronized. - There is no guarantee on the order in which the session for each - request is opened or saved, it will occur in the order that requests - begin and end processing. - - .. versionadded:: 0.8 - """ - - #: :meth:`make_null_session` will look here for the class that should - #: be created when a null session is requested. Likewise the - #: :meth:`is_null_session` method will perform a typecheck against - #: this type. - null_session_class = NullSession - - #: A flag that indicates if the session interface is pickle based. - #: This can be used by Flask extensions to make a decision in regards - #: to how to deal with the session object. - #: - #: .. versionadded:: 0.10 - pickle_based = False - - def make_null_session(self, app: Flask) -> NullSession: - """Creates a null session which acts as a replacement object if the - real session support could not be loaded due to a configuration - error. This mainly aids the user experience because the job of the - null session is to still support lookup without complaining but - modifications are answered with a helpful error message of what - failed. - - This creates an instance of :attr:`null_session_class` by default. - """ - return self.null_session_class() - - def is_null_session(self, obj: object) -> bool: - """Checks if a given object is a null session. Null sessions are - not asked to be saved. - - This checks if the object is an instance of :attr:`null_session_class` - by default. - """ - return isinstance(obj, self.null_session_class) - - def get_cookie_name(self, app: Flask) -> str: - """The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``.""" - return app.config["SESSION_COOKIE_NAME"] # type: ignore[no-any-return] - - def get_cookie_domain(self, app: Flask) -> str | None: - """The value of the ``Domain`` parameter on the session cookie. If not set, - browsers will only send the cookie to the exact domain it was set from. - Otherwise, they will send it to any subdomain of the given value as well. - - Uses the :data:`SESSION_COOKIE_DOMAIN` config. - - .. versionchanged:: 2.3 - Not set by default, does not fall back to ``SERVER_NAME``. - """ - return app.config["SESSION_COOKIE_DOMAIN"] # type: ignore[no-any-return] - - def get_cookie_path(self, app: Flask) -> str: - """Returns the path for which the cookie should be valid. The - default implementation uses the value from the ``SESSION_COOKIE_PATH`` - config var if it's set, and falls back to ``APPLICATION_ROOT`` or - uses ``/`` if it's ``None``. - """ - return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] # type: ignore[no-any-return] - - def get_cookie_httponly(self, app: Flask) -> bool: - """Returns True if the session cookie should be httponly. This - currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` - config var. - """ - return app.config["SESSION_COOKIE_HTTPONLY"] # type: ignore[no-any-return] - - def get_cookie_secure(self, app: Flask) -> bool: - """Returns True if the cookie should be secure. This currently - just returns the value of the ``SESSION_COOKIE_SECURE`` setting. - """ - return app.config["SESSION_COOKIE_SECURE"] # type: ignore[no-any-return] - - def get_cookie_samesite(self, app: Flask) -> str | None: - """Return ``'Strict'`` or ``'Lax'`` if the cookie should use the - ``SameSite`` attribute. This currently just returns the value of - the :data:`SESSION_COOKIE_SAMESITE` setting. 
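A small configuration sketch showing the keys the cookie helpers above read; it assumes an ``app`` object created elsewhere, with values chosen only for illustration:

    app.config.update(
        SESSION_COOKIE_NAME="session",
        SESSION_COOKIE_SAMESITE="Lax",
        SESSION_COOKIE_SECURE=True,      # only send the cookie over HTTPS
        SESSION_COOKIE_HTTPONLY=True,    # hide the cookie from JavaScript
    )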
- """ - return app.config["SESSION_COOKIE_SAMESITE"] # type: ignore[no-any-return] - - def get_cookie_partitioned(self, app: Flask) -> bool: - """Returns True if the cookie should be partitioned. By default, uses - the value of :data:`SESSION_COOKIE_PARTITIONED`. - - .. versionadded:: 3.1 - """ - return app.config["SESSION_COOKIE_PARTITIONED"] # type: ignore[no-any-return] - - def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None: - """A helper method that returns an expiration date for the session - or ``None`` if the session is linked to the browser session. The - default implementation returns now + the permanent session - lifetime configured on the application. - """ - if session.permanent: - return datetime.now(timezone.utc) + app.permanent_session_lifetime - return None - - def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool: - """Used by session backends to determine if a ``Set-Cookie`` header - should be set for this session cookie for this response. If the session - has been modified, the cookie is set. If the session is permanent and - the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is - always set. - - This check is usually skipped if the session was deleted. - - .. versionadded:: 0.11 - """ - - return session.modified or ( - session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"] - ) - - def open_session(self, app: Flask, request: Request) -> SessionMixin | None: - """This is called at the beginning of each request, after - pushing the request context, before matching the URL. - - This must return an object which implements a dictionary-like - interface as well as the :class:`SessionMixin` interface. - - This will return ``None`` to indicate that loading failed in - some way that is not immediately an error. The request - context will fall back to using :meth:`make_null_session` - in this case. - """ - raise NotImplementedError() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - """This is called at the end of each request, after generating - a response, before removing the request context. It is skipped - if :meth:`is_null_session` returns ``True``. - """ - raise NotImplementedError() - - -session_json_serializer = TaggedJSONSerializer() - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class SecureCookieSessionInterface(SessionInterface): - """The default session interface that stores sessions in signed cookies - through the :mod:`itsdangerous` module. - """ - - #: the salt that should be applied on top of the secret key for the - #: signing of cookie based sessions. - salt = "cookie-session" - #: the hash function to use for the signature. The default is sha1 - digest_method = staticmethod(_lazy_sha1) - #: the name of the itsdangerous supported key derivation. The default - #: is hmac. - key_derivation = "hmac" - #: A python serializer for the payload. The default is a compact - #: JSON derived serializer with support for some extra Python types - #: such as datetime objects or tuples. 
- serializer = session_json_serializer - session_class = SecureCookieSession - - def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None: - if not app.secret_key: - return None - - keys: list[str | bytes] = [] - - if fallbacks := app.config["SECRET_KEY_FALLBACKS"]: - keys.extend(fallbacks) - - keys.append(app.secret_key) # itsdangerous expects current key at top - return URLSafeTimedSerializer( - keys, # type: ignore[arg-type] - salt=self.salt, - serializer=self.serializer, - signer_kwargs={ - "key_derivation": self.key_derivation, - "digest_method": self.digest_method, - }, - ) - - def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None: - s = self.get_signing_serializer(app) - if s is None: - return None - val = request.cookies.get(self.get_cookie_name(app)) - if not val: - return self.session_class() - max_age = int(app.permanent_session_lifetime.total_seconds()) - try: - data = s.loads(val, max_age=max_age) - return self.session_class(data) - except BadSignature: - return self.session_class() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - name = self.get_cookie_name(app) - domain = self.get_cookie_domain(app) - path = self.get_cookie_path(app) - secure = self.get_cookie_secure(app) - partitioned = self.get_cookie_partitioned(app) - samesite = self.get_cookie_samesite(app) - httponly = self.get_cookie_httponly(app) - - # Add a "Vary: Cookie" header if the session was accessed at all. - if session.accessed: - response.vary.add("Cookie") - - # If the session is modified to be empty, remove the cookie. - # If the session is empty, return without setting the cookie. - if not session: - if session.modified: - response.delete_cookie( - name, - domain=domain, - path=path, - secure=secure, - partitioned=partitioned, - samesite=samesite, - httponly=httponly, - ) - response.vary.add("Cookie") - - return - - if not self.should_set_cookie(app, session): - return - - expires = self.get_expiration_time(app, session) - val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore[union-attr] - response.set_cookie( - name, - val, - expires=expires, - httponly=httponly, - domain=domain, - path=path, - secure=secure, - partitioned=partitioned, - samesite=samesite, - ) - response.vary.add("Cookie") diff --git a/.venv/lib/python3.12/site-packages/flask/signals.py b/.venv/lib/python3.12/site-packages/flask/signals.py deleted file mode 100644 index 444fda9..0000000 --- a/.venv/lib/python3.12/site-packages/flask/signals.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from blinker import Namespace - -# This namespace is only for signals provided by Flask itself. 
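As a hedged illustration of how the signals declared in the deleted ``signals.py`` below are typically consumed (the receiver function is made up for the sketch):

    from flask import Flask, request_started

    app = Flask(__name__)

    def on_request_started(sender, **extra):
        # sender is the application emitting the signal
        sender.logger.debug("request started")

    request_started.connect(on_request_started, app)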
-_signals = Namespace() - -template_rendered = _signals.signal("template-rendered") -before_render_template = _signals.signal("before-render-template") -request_started = _signals.signal("request-started") -request_finished = _signals.signal("request-finished") -request_tearing_down = _signals.signal("request-tearing-down") -got_request_exception = _signals.signal("got-request-exception") -appcontext_tearing_down = _signals.signal("appcontext-tearing-down") -appcontext_pushed = _signals.signal("appcontext-pushed") -appcontext_popped = _signals.signal("appcontext-popped") -message_flashed = _signals.signal("message-flashed") diff --git a/.venv/lib/python3.12/site-packages/flask/templating.py b/.venv/lib/python3.12/site-packages/flask/templating.py deleted file mode 100644 index 16d480f..0000000 --- a/.venv/lib/python3.12/site-packages/flask/templating.py +++ /dev/null @@ -1,219 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2 import BaseLoader -from jinja2 import Environment as BaseEnvironment -from jinja2 import Template -from jinja2 import TemplateNotFound - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .helpers import stream_with_context -from .signals import before_render_template -from .signals import template_rendered - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .sansio.app import App - from .sansio.scaffold import Scaffold - - -def _default_template_ctx_processor() -> dict[str, t.Any]: - """Default template context processor. Injects `request`, - `session` and `g`. - """ - appctx = _cv_app.get(None) - reqctx = _cv_request.get(None) - rv: dict[str, t.Any] = {} - if appctx is not None: - rv["g"] = appctx.g - if reqctx is not None: - rv["request"] = reqctx.request - rv["session"] = reqctx.session - return rv - - -class Environment(BaseEnvironment): - """Works like a regular Jinja environment but has some additional - knowledge of how Flask's blueprint works so that it can prepend the - name of the blueprint to referenced templates if necessary. - """ - - def __init__(self, app: App, **options: t.Any) -> None: - if "loader" not in options: - options["loader"] = app.create_global_jinja_loader() - BaseEnvironment.__init__(self, **options) - self.app = app - - -class DispatchingJinjaLoader(BaseLoader): - """A loader that looks for templates in the application and all - the blueprint folders. 
- """ - - def __init__(self, app: App) -> None: - self.app = app - - def get_source( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - if self.app.config["EXPLAIN_TEMPLATE_LOADING"]: - return self._get_source_explained(environment, template) - return self._get_source_fast(environment, template) - - def _get_source_explained( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - attempts = [] - rv: tuple[str, str | None, t.Callable[[], bool] | None] | None - trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None - - for srcobj, loader in self._iter_loaders(template): - try: - rv = loader.get_source(environment, template) - if trv is None: - trv = rv - except TemplateNotFound: - rv = None - attempts.append((loader, srcobj, rv)) - - from .debughelpers import explain_template_loading_attempts - - explain_template_loading_attempts(self.app, template, attempts) - - if trv is not None: - return trv - raise TemplateNotFound(template) - - def _get_source_fast( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - for _srcobj, loader in self._iter_loaders(template): - try: - return loader.get_source(environment, template) - except TemplateNotFound: - continue - raise TemplateNotFound(template) - - def _iter_loaders(self, template: str) -> t.Iterator[tuple[Scaffold, BaseLoader]]: - loader = self.app.jinja_loader - if loader is not None: - yield self.app, loader - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - yield blueprint, loader - - def list_templates(self) -> list[str]: - result = set() - loader = self.app.jinja_loader - if loader is not None: - result.update(loader.list_templates()) - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - for template in loader.list_templates(): - result.add(template) - - return list(result) - - -def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - rv = template.render(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - return rv - - -def render_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> str: - """Render a template by name with the given context. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _render(app, template, context) - - -def render_template_string(source: str, **context: t.Any) -> str: - """Render a template from the given source string with the given - context. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. 
- """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _render(app, template, context) - - -def _stream( - app: Flask, template: Template, context: dict[str, t.Any] -) -> t.Iterator[str]: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - def generate() -> t.Iterator[str]: - yield from template.generate(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - rv = generate() - - # If a request context is active, keep it while generating. - if request: - rv = stream_with_context(rv) - - return rv - - -def stream_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> t.Iterator[str]: - """Render a template by name with the given context as a stream. - This returns an iterator of strings, which can be used as a - streaming response from a view. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _stream(app, template, context) - - -def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]: - """Render a template from the given source string with the given - context as a stream. This returns an iterator of strings, which can - be used as a streaming response from a view. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _stream(app, template, context) diff --git a/.venv/lib/python3.12/site-packages/flask/testing.py b/.venv/lib/python3.12/site-packages/flask/testing.py deleted file mode 100644 index 55eb12f..0000000 --- a/.venv/lib/python3.12/site-packages/flask/testing.py +++ /dev/null @@ -1,298 +0,0 @@ -from __future__ import annotations - -import importlib.metadata -import typing as t -from contextlib import contextmanager -from contextlib import ExitStack -from copy import copy -from types import TracebackType -from urllib.parse import urlsplit - -import werkzeug.test -from click.testing import CliRunner -from click.testing import Result -from werkzeug.test import Client -from werkzeug.wrappers import Request as BaseRequest - -from .cli import ScriptInfo -from .sessions import SessionMixin - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - from werkzeug.test import TestResponse - - from .app import Flask - - -class EnvironBuilder(werkzeug.test.EnvironBuilder): - """An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the - application. - - :param app: The Flask application to configure the environment from. - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to :data:`SERVER_NAME`. 
- :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - - def __init__( - self, - app: Flask, - path: str = "/", - base_url: str | None = None, - subdomain: str | None = None, - url_scheme: str | None = None, - *args: t.Any, - **kwargs: t.Any, - ) -> None: - assert not (base_url or subdomain or url_scheme) or ( - base_url is not None - ) != bool(subdomain or url_scheme), ( - 'Cannot pass "subdomain" or "url_scheme" with "base_url".' - ) - - if base_url is None: - http_host = app.config.get("SERVER_NAME") or "localhost" - app_root = app.config["APPLICATION_ROOT"] - - if subdomain: - http_host = f"{subdomain}.{http_host}" - - if url_scheme is None: - url_scheme = app.config["PREFERRED_URL_SCHEME"] - - url = urlsplit(path) - base_url = ( - f"{url.scheme or url_scheme}://{url.netloc or http_host}" - f"/{app_root.lstrip('/')}" - ) - path = url.path - - if url.query: - path = f"{path}?{url.query}" - - self.app = app - super().__init__(path, base_url, *args, **kwargs) - - def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize ``obj`` to a JSON-formatted string. - - The serialization will be configured according to the config associated - with this EnvironBuilder's ``app``. - """ - return self.app.json.dumps(obj, **kwargs) - - -_werkzeug_version = "" - - -def _get_werkzeug_version() -> str: - global _werkzeug_version - - if not _werkzeug_version: - _werkzeug_version = importlib.metadata.version("werkzeug") - - return _werkzeug_version - - -class FlaskClient(Client): - """Works like a regular Werkzeug test client but has knowledge about - Flask's contexts to defer the cleanup of the request context until - the end of a ``with`` block. For general information about how to - use this class refer to :class:`werkzeug.test.Client`. - - .. versionchanged:: 0.12 - `app.test_client()` includes preset default environment, which can be - set after instantiation of the `app.test_client()` object in - `client.environ_base`. - - Basic usage is outlined in the :doc:`/testing` chapter. - """ - - application: Flask - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - super().__init__(*args, **kwargs) - self.preserve_context = False - self._new_contexts: list[t.ContextManager[t.Any]] = [] - self._context_stack = ExitStack() - self.environ_base = { - "REMOTE_ADDR": "127.0.0.1", - "HTTP_USER_AGENT": f"Werkzeug/{_get_werkzeug_version()}", - } - - @contextmanager - def session_transaction( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Iterator[SessionMixin]: - """When used in combination with a ``with`` statement this opens a - session transaction. This can be used to modify the session that - the test client uses. Once the ``with`` block is left the session is - stored back. - - :: - - with client.session_transaction() as session: - session['value'] = 42 - - Internally this is implemented by going through a temporary test - request context and since session handling could depend on - request variables this function accepts the same arguments as - :meth:`~flask.Flask.test_request_context` which are directly - passed through. - """ - if self._cookies is None: - raise TypeError( - "Cookies are disabled. Create a client with 'use_cookies=True'." 
- ) - - app = self.application - ctx = app.test_request_context(*args, **kwargs) - self._add_cookies_to_wsgi(ctx.request.environ) - - with ctx: - sess = app.session_interface.open_session(app, ctx.request) - - if sess is None: - raise RuntimeError("Session backend did not open a session.") - - yield sess - resp = app.response_class() - - if app.session_interface.is_null_session(sess): - return - - with ctx: - app.session_interface.save_session(app, sess, resp) - - self._update_cookies_from_response( - ctx.request.host.partition(":")[0], - ctx.request.path, - resp.headers.getlist("Set-Cookie"), - ) - - def _copy_environ(self, other: WSGIEnvironment) -> WSGIEnvironment: - out = {**self.environ_base, **other} - - if self.preserve_context: - out["werkzeug.debug.preserve_context"] = self._new_contexts.append - - return out - - def _request_from_builder_args( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> BaseRequest: - kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {})) - builder = EnvironBuilder(self.application, *args, **kwargs) - - try: - return builder.get_request() - finally: - builder.close() - - def open( - self, - *args: t.Any, - buffered: bool = False, - follow_redirects: bool = False, - **kwargs: t.Any, - ) -> TestResponse: - if args and isinstance( - args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest) - ): - if isinstance(args[0], werkzeug.test.EnvironBuilder): - builder = copy(args[0]) - builder.environ_base = self._copy_environ(builder.environ_base or {}) # type: ignore[arg-type] - request = builder.get_request() - elif isinstance(args[0], dict): - request = EnvironBuilder.from_environ( - args[0], app=self.application, environ_base=self._copy_environ({}) - ).get_request() - else: - # isinstance(args[0], BaseRequest) - request = copy(args[0]) - request.environ = self._copy_environ(request.environ) - else: - # request is None - request = self._request_from_builder_args(args, kwargs) - - # Pop any previously preserved contexts. This prevents contexts - # from being preserved across redirects or multiple requests - # within a single block. - self._context_stack.close() - - response = super().open( - request, - buffered=buffered, - follow_redirects=follow_redirects, - ) - response.json_module = self.application.json # type: ignore[assignment] - - # Re-push contexts that were preserved during the request. - for cm in self._new_contexts: - self._context_stack.enter_context(cm) - - self._new_contexts.clear() - return response - - def __enter__(self) -> FlaskClient: - if self.preserve_context: - raise RuntimeError("Cannot nest client invocations") - self.preserve_context = True - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.preserve_context = False - self._context_stack.close() - - -class FlaskCliRunner(CliRunner): - """A :class:`~click.testing.CliRunner` for testing a Flask app's - CLI commands. Typically created using - :meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. - """ - - def __init__(self, app: Flask, **kwargs: t.Any) -> None: - self.app = app - super().__init__(**kwargs) - - def invoke( # type: ignore - self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any - ) -> Result: - """Invokes a CLI command in an isolated environment. See - :meth:`CliRunner.invoke ` for - full method documentation. See :ref:`testing-cli` for examples. 
- - If the ``obj`` argument is not given, passes an instance of - :class:`~flask.cli.ScriptInfo` that knows how to load the Flask - app being tested. - - :param cli: Command object to invoke. Default is the app's - :attr:`~flask.app.Flask.cli` group. - :param args: List of strings to invoke the command with. - - :return: a :class:`~click.testing.Result` object. - """ - if cli is None: - cli = self.app.cli - - if "obj" not in kwargs: - kwargs["obj"] = ScriptInfo(create_app=lambda: self.app) - - return super().invoke(cli, args, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/flask/typing.py b/.venv/lib/python3.12/site-packages/flask/typing.py deleted file mode 100644 index 6b70c40..0000000 --- a/.venv/lib/python3.12/site-packages/flask/typing.py +++ /dev/null @@ -1,93 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIApplication # noqa: F401 - from werkzeug.datastructures import Headers # noqa: F401 - from werkzeug.sansio.response import Response # noqa: F401 - -# The possible types that are directly convertible or are a Response object. -ResponseValue = t.Union[ - "Response", - str, - bytes, - list[t.Any], - # Only dict is actually accepted, but Mapping allows for TypedDict. - t.Mapping[str, t.Any], - t.Iterator[str], - t.Iterator[bytes], - cabc.AsyncIterable[str], # for Quart, until App is generic. - cabc.AsyncIterable[bytes], -] - -# the possible types for an individual HTTP header -# This should be a Union, but mypy doesn't pass unless it's a TypeVar. -HeaderValue = t.Union[str, list[str], tuple[str, ...]] - -# the possible types for HTTP headers -HeadersValue = t.Union[ - "Headers", - t.Mapping[str, HeaderValue], - t.Sequence[tuple[str, HeaderValue]], -] - -# The possible types returned by a route function. -ResponseReturnValue = t.Union[ - ResponseValue, - tuple[ResponseValue, HeadersValue], - tuple[ResponseValue, int], - tuple[ResponseValue, int, HeadersValue], - "WSGIApplication", -] - -# Allow any subclass of werkzeug.Response, such as the one from Flask, -# as a callback argument. Using werkzeug.Response directly makes a -# callback annotated with flask.Response fail type checking. 
-ResponseClass = t.TypeVar("ResponseClass", bound="Response") - -AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named -AfterRequestCallable = t.Union[ - t.Callable[[ResponseClass], ResponseClass], - t.Callable[[ResponseClass], t.Awaitable[ResponseClass]], -] -BeforeFirstRequestCallable = t.Union[ - t.Callable[[], None], t.Callable[[], t.Awaitable[None]] -] -BeforeRequestCallable = t.Union[ - t.Callable[[], t.Optional[ResponseReturnValue]], - t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]], -] -ShellContextProcessorCallable = t.Callable[[], dict[str, t.Any]] -TeardownCallable = t.Union[ - t.Callable[[t.Optional[BaseException]], None], - t.Callable[[t.Optional[BaseException]], t.Awaitable[None]], -] -TemplateContextProcessorCallable = t.Union[ - t.Callable[[], dict[str, t.Any]], - t.Callable[[], t.Awaitable[dict[str, t.Any]]], -] -TemplateFilterCallable = t.Callable[..., t.Any] -TemplateGlobalCallable = t.Callable[..., t.Any] -TemplateTestCallable = t.Callable[..., bool] -URLDefaultCallable = t.Callable[[str, dict[str, t.Any]], None] -URLValuePreprocessorCallable = t.Callable[ - [t.Optional[str], t.Optional[dict[str, t.Any]]], None -] - -# This should take Exception, but that either breaks typing the argument -# with a specific exception, or decorating multiple times with different -# exceptions (and using a union type on the argument). -# https://github.com/pallets/flask/issues/4095 -# https://github.com/pallets/flask/issues/4295 -# https://github.com/pallets/flask/issues/4297 -ErrorHandlerCallable = t.Union[ - t.Callable[[t.Any], ResponseReturnValue], - t.Callable[[t.Any], t.Awaitable[ResponseReturnValue]], -] - -RouteCallable = t.Union[ - t.Callable[..., ResponseReturnValue], - t.Callable[..., t.Awaitable[ResponseReturnValue]], -] diff --git a/.venv/lib/python3.12/site-packages/flask/views.py b/.venv/lib/python3.12/site-packages/flask/views.py deleted file mode 100644 index 53fe976..0000000 --- a/.venv/lib/python3.12/site-packages/flask/views.py +++ /dev/null @@ -1,191 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import typing as ft -from .globals import current_app -from .globals import request - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -http_method_funcs = frozenset( - ["get", "post", "head", "options", "delete", "put", "trace", "patch"] -) - - -class View: - """Subclass this class and override :meth:`dispatch_request` to - create a generic class-based view. Call :meth:`as_view` to create a - view function that creates an instance of the class with the given - arguments and calls its ``dispatch_request`` method with any URL - variables. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class Hello(View): - init_every_request = False - - def dispatch_request(self, name): - return f"Hello, {name}!" - - app.add_url_rule( - "/hello/", view_func=Hello.as_view("hello") - ) - - Set :attr:`methods` on the class to change what methods the view - accepts. - - Set :attr:`decorators` on the class to apply a list of decorators to - the generated view function. Decorators applied to the class itself - will not be applied to the generated view function! - - Set :attr:`init_every_request` to ``False`` for efficiency, unless - you need to store request-global data on ``self``. - """ - - #: The methods this view is registered for. Uses the same default - #: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and - #: ``add_url_rule`` by default. 
- methods: t.ClassVar[t.Collection[str] | None] = None - - #: Control whether the ``OPTIONS`` method is handled automatically. - #: Uses the same default (``True``) as ``route`` and - #: ``add_url_rule`` by default. - provide_automatic_options: t.ClassVar[bool | None] = None - - #: A list of decorators to apply, in order, to the generated view - #: function. Remember that ``@decorator`` syntax is applied bottom - #: to top, so the first decorator in the list would be the bottom - #: decorator. - #: - #: .. versionadded:: 0.8 - decorators: t.ClassVar[list[t.Callable[..., t.Any]]] = [] - - #: Create a new instance of this view class for every request by - #: default. If a view subclass sets this to ``False``, the same - #: instance is used for every request. - #: - #: A single instance is more efficient, especially if complex setup - #: is done during init. However, storing data on ``self`` is no - #: longer safe across requests, and :data:`~flask.g` should be used - #: instead. - #: - #: .. versionadded:: 2.2 - init_every_request: t.ClassVar[bool] = True - - def dispatch_request(self) -> ft.ResponseReturnValue: - """The actual view function behavior. Subclasses must override - this and return a valid response. Any variables from the URL - rule are passed as keyword arguments. - """ - raise NotImplementedError() - - @classmethod - def as_view( - cls, name: str, *class_args: t.Any, **class_kwargs: t.Any - ) -> ft.RouteCallable: - """Convert the class into a view function that can be registered - for a route. - - By default, the generated view will create a new instance of the - view class for every request and call its - :meth:`dispatch_request` method. If the view class sets - :attr:`init_every_request` to ``False``, the same instance will - be used for every request. - - Except for ``name``, all other arguments passed to this method - are forwarded to the view class ``__init__`` method. - - .. versionchanged:: 2.2 - Added the ``init_every_request`` class attribute. - """ - if cls.init_every_request: - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - self = view.view_class( # type: ignore[attr-defined] - *class_args, **class_kwargs - ) - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - else: - self = cls(*class_args, **class_kwargs) # pyright: ignore - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - if cls.decorators: - view.__name__ = name - view.__module__ = cls.__module__ - for decorator in cls.decorators: - view = decorator(view) - - # We attach the view class to the view function for two reasons: - # first of all it allows us to easily figure out what class-based - # view this thing came from, secondly it's also used for instantiating - # the view class so you can actually replace it with something else - # for testing purposes and debugging. - view.view_class = cls # type: ignore - view.__name__ = name - view.__doc__ = cls.__doc__ - view.__module__ = cls.__module__ - view.methods = cls.methods # type: ignore - view.provide_automatic_options = cls.provide_automatic_options # type: ignore - return view - - -class MethodView(View): - """Dispatches request methods to the corresponding instance methods. - For example, if you implement a ``get`` method, it will be used to - handle ``GET`` requests. - - This can be useful for defining a REST API. - - :attr:`methods` is automatically set based on the methods defined on - the class. 
- - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class CounterAPI(MethodView): - def get(self): - return str(session.get("counter", 0)) - - def post(self): - session["counter"] = session.get("counter", 0) + 1 - return redirect(url_for("counter")) - - app.add_url_rule( - "/counter", view_func=CounterAPI.as_view("counter") - ) - """ - - def __init_subclass__(cls, **kwargs: t.Any) -> None: - super().__init_subclass__(**kwargs) - - if "methods" not in cls.__dict__: - methods = set() - - for base in cls.__bases__: - if getattr(base, "methods", None): - methods.update(base.methods) # type: ignore[attr-defined] - - for key in http_method_funcs: - if hasattr(cls, key): - methods.add(key.upper()) - - if methods: - cls.methods = methods - - def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue: - meth = getattr(self, request.method.lower(), None) - - # If the request method is HEAD and we don't have a handler for it - # retry with GET. - if meth is None and request.method == "HEAD": - meth = getattr(self, "get", None) - - assert meth is not None, f"Unimplemented method {request.method!r}" - return current_app.ensure_sync(meth)(**kwargs) # type: ignore[no-any-return] diff --git a/.venv/lib/python3.12/site-packages/flask/wrappers.py b/.venv/lib/python3.12/site-packages/flask/wrappers.py deleted file mode 100644 index bab6102..0000000 --- a/.venv/lib/python3.12/site-packages/flask/wrappers.py +++ /dev/null @@ -1,257 +0,0 @@ -from __future__ import annotations - -import typing as t - -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import HTTPException -from werkzeug.wrappers import Request as RequestBase -from werkzeug.wrappers import Response as ResponseBase - -from . import json -from .globals import current_app -from .helpers import _split_blueprint_path - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.routing import Rule - - -class Request(RequestBase): - """The request object used by default in Flask. Remembers the - matched endpoint and view arguments. - - It is what ends up as :class:`~flask.request`. If you want to replace - the request object used you can subclass this and set - :attr:`~flask.Flask.request_class` to your subclass. - - The request object is a :class:`~werkzeug.wrappers.Request` subclass and - provides all of the attributes Werkzeug defines plus a few Flask - specific ones. - """ - - json_module: t.Any = json - - #: The internal URL rule that matched the request. This can be - #: useful to inspect which methods are allowed for the URL from - #: a before/after handler (``request.url_rule.methods``) etc. - #: Though if the request's method was invalid for the URL rule, - #: the valid list is available in ``routing_exception.valid_methods`` - #: instead (an attribute of the Werkzeug exception - #: :exc:`~werkzeug.exceptions.MethodNotAllowed`) - #: because the request was never internally bound. - #: - #: .. versionadded:: 0.6 - url_rule: Rule | None = None - - #: A dict of view arguments that matched the request. If an exception - #: happened when matching, this will be ``None``. - view_args: dict[str, t.Any] | None = None - - #: If matching the URL failed, this is the exception that will be - #: raised / was raised as part of the request handling. This is - #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or - #: something similar. 
- routing_exception: HTTPException | None = None - - _max_content_length: int | None = None - _max_form_memory_size: int | None = None - _max_form_parts: int | None = None - - @property - def max_content_length(self) -> int | None: - """The maximum number of bytes that will be read during this request. If - this limit is exceeded, a 413 :exc:`~werkzeug.exceptions.RequestEntityTooLarge` - error is raised. If it is set to ``None``, no limit is enforced at the - Flask application level. However, if it is ``None`` and the request has - no ``Content-Length`` header and the WSGI server does not indicate that - it terminates the stream, then no data is read to avoid an infinite - stream. - - Each request defaults to the :data:`MAX_CONTENT_LENGTH` config, which - defaults to ``None``. It can be set on a specific ``request`` to apply - the limit to that specific view. This should be set appropriately based - on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This can be set per-request. - - .. versionchanged:: 0.6 - This is configurable through Flask config. - """ - if self._max_content_length is not None: - return self._max_content_length - - if not current_app: - return super().max_content_length - - return current_app.config["MAX_CONTENT_LENGTH"] # type: ignore[no-any-return] - - @max_content_length.setter - def max_content_length(self, value: int | None) -> None: - self._max_content_length = value - - @property - def max_form_memory_size(self) -> int | None: - """The maximum size in bytes any non-file form field may be in a - ``multipart/form-data`` body. If this limit is exceeded, a 413 - :exc:`~werkzeug.exceptions.RequestEntityTooLarge` error is raised. If it - is set to ``None``, no limit is enforced at the Flask application level. - - Each request defaults to the :data:`MAX_FORM_MEMORY_SIZE` config, which - defaults to ``500_000``. It can be set on a specific ``request`` to - apply the limit to that specific view. This should be set appropriately - based on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This is configurable through Flask config. - """ - if self._max_form_memory_size is not None: - return self._max_form_memory_size - - if not current_app: - return super().max_form_memory_size - - return current_app.config["MAX_FORM_MEMORY_SIZE"] # type: ignore[no-any-return] - - @max_form_memory_size.setter - def max_form_memory_size(self, value: int | None) -> None: - self._max_form_memory_size = value - - @property # type: ignore[override] - def max_form_parts(self) -> int | None: - """The maximum number of fields that may be present in a - ``multipart/form-data`` body. If this limit is exceeded, a 413 - :exc:`~werkzeug.exceptions.RequestEntityTooLarge` error is raised. If it - is set to ``None``, no limit is enforced at the Flask application level. - - Each request defaults to the :data:`MAX_FORM_PARTS` config, which - defaults to ``1_000``. It can be set on a specific ``request`` to apply - the limit to that specific view. This should be set appropriately based - on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This is configurable through Flask config. 
- """ - if self._max_form_parts is not None: - return self._max_form_parts - - if not current_app: - return super().max_form_parts - - return current_app.config["MAX_FORM_PARTS"] # type: ignore[no-any-return] - - @max_form_parts.setter - def max_form_parts(self, value: int | None) -> None: - self._max_form_parts = value - - @property - def endpoint(self) -> str | None: - """The endpoint that matched the request URL. - - This will be ``None`` if matching failed or has not been - performed yet. - - This in combination with :attr:`view_args` can be used to - reconstruct the same URL or a modified URL. - """ - if self.url_rule is not None: - return self.url_rule.endpoint # type: ignore[no-any-return] - - return None - - @property - def blueprint(self) -> str | None: - """The registered name of the current blueprint. - - This will be ``None`` if the endpoint is not part of a - blueprint, or if URL matching failed or has not been performed - yet. - - This does not necessarily match the name the blueprint was - created with. It may have been nested, or registered with a - different name. - """ - endpoint = self.endpoint - - if endpoint is not None and "." in endpoint: - return endpoint.rpartition(".")[0] - - return None - - @property - def blueprints(self) -> list[str]: - """The registered names of the current blueprint upwards through - parent blueprints. - - This will be an empty list if there is no current blueprint, or - if URL matching failed. - - .. versionadded:: 2.0.1 - """ - name = self.blueprint - - if name is None: - return [] - - return _split_blueprint_path(name) - - def _load_form_data(self) -> None: - super()._load_form_data() - - # In debug mode we're replacing the files multidict with an ad-hoc - # subclass that raises a different error for key errors. - if ( - current_app - and current_app.debug - and self.mimetype != "multipart/form-data" - and not self.files - ): - from .debughelpers import attach_enctype_error_multidict - - attach_enctype_error_multidict(self) - - def on_json_loading_failed(self, e: ValueError | None) -> t.Any: - try: - return super().on_json_loading_failed(e) - except BadRequest as ebr: - if current_app and current_app.debug: - raise - - raise BadRequest() from ebr - - -class Response(ResponseBase): - """The response object that is used by default in Flask. Works like the - response object from Werkzeug but is set to have an HTML mimetype by - default. Quite often you don't have to create this object yourself because - :meth:`~flask.Flask.make_response` will take care of that for you. - - If you want to replace the response object used you can subclass this and - set :attr:`~flask.Flask.response_class` to your subclass. - - .. versionchanged:: 1.0 - JSON support is added to the response, like the request. This is useful - when testing to get the test client response data as JSON. - - .. versionchanged:: 1.0 - - Added :attr:`max_cookie_size`. - """ - - default_mimetype: str | None = "text/html" - - json_module = json - - autocorrect_location_header = False - - @property - def max_cookie_size(self) -> int: # type: ignore - """Read-only view of the :data:`MAX_COOKIE_SIZE` config key. - - See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in - Werkzeug's docs. 
- """ - if current_app: - return current_app.config["MAX_COOKIE_SIZE"] # type: ignore[no-any-return] - - # return Werkzeug's default when not in an app context - return super().max_cookie_size diff --git a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/METADATA b/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/METADATA deleted file mode 100644 index 786a01c..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/METADATA +++ /dev/null @@ -1,143 +0,0 @@ -Metadata-Version: 2.4 -Name: flask-cors -Version: 6.0.1 -Summary: A Flask extension simplifying CORS support -Author-email: Cory Dolphin -Project-URL: Homepage, https://corydolphin.github.io/flask-cors/ -Project-URL: Repository, https://github.com/corydolphin/flask-cors -Project-URL: Documentation, https://corydolphin.github.io/flask-cors/ -Keywords: python -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: <4.0,>=3.9 -Description-Content-Type: text/x-rst -Requires-Dist: flask>=0.9 -Requires-Dist: Werkzeug>=0.7 - -Flask-CORS -========== - -|Build Status| |Latest Version| |Supported Python versions| -|License| - -A Flask extension for handling Cross Origin Resource Sharing (CORS), making cross-origin AJAX possible. - -This package has a simple philosophy: when you want to enable CORS, you wish to enable it for all use cases on a domain. -This means no mucking around with different allowed headers, methods, etc. - -By default, submission of cookies across domains is disabled due to the security implications. -Please see the documentation for how to enable credential'ed requests, and please make sure you add some sort of `CSRF `__ protection before doing so! - -Installation ------------- - -Install the extension with using pip, or easy\_install. - -.. code:: bash - - $ pip install -U flask-cors - -Usage ------ - -This package exposes a Flask extension which by default enables CORS support on all routes, for all origins and methods. -It allows parameterization of all CORS headers on a per-resource level. -The package also contains a decorator, for those who prefer this approach. - -Simple Usage -~~~~~~~~~~~~ - -In the simplest case, initialize the Flask-Cors extension with default arguments in order to allow CORS for all domains on all routes. -See the full list of options in the `documentation `__. - -.. code:: python - - - from flask import Flask - from flask_cors import CORS - - app = Flask(__name__) - CORS(app) - - @app.route("/") - def helloWorld(): - return "Hello, cross-origin-world!" - -Resource specific CORS -^^^^^^^^^^^^^^^^^^^^^^ - -Alternatively, you can specify CORS options on a resource and origin level of granularity by passing a dictionary as the `resources` option, mapping paths to a set of options. 
-See the full list of options in the `documentation `__. - -.. code:: python - - app = Flask(__name__) - cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) - - @app.route("/api/v1/users") - def list_users(): - return "user example" - -Route specific CORS via decorator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This extension also exposes a simple decorator to decorate flask routes with. -Simply add ``@cross_origin()`` below a call to Flask's ``@app.route(..)`` to allow CORS on a given route. -See the full list of options in the `decorator documentation `__. - -.. code:: python - - @app.route("/") - @cross_origin() - def helloWorld(): - return "Hello, cross-origin-world!" - -Documentation -------------- - -For a full list of options, please see the full `documentation `__ - -Troubleshooting ---------------- - -If things aren't working as you expect, enable logging to help understand what is going on under the hood, and why. - -.. code:: python - - logging.getLogger('flask_cors').level = logging.DEBUG - - - -Set Up Your Development Environment ---- -The development environment uses `uv` for Python version management as well as dependency management. -There are helpful Makefile targets to do everything you need. Use `make test` to get started! - - -Contributing ------------- - -Questions, comments or improvements? -Please create an issue on `Github `__, tweet at `@corydolphin `__ or send me an email. -I do my best to include every contribution proposed in any way that I can. - -Credits -------- - -This Flask extension is based upon the `Decorator for the HTTP Access Control `__ written by Armin Ronacher. - -.. |Build Status| image:: https://github.com/corydolphin/flask-cors/actions/workflows/unittests.yaml/badge.svg - :target: https://travis-ci.org/corydolphin/flask-cors -.. |Latest Version| image:: https://img.shields.io/pypi/v/Flask-Cors.svg - :target: https://pypi.python.org/pypi/Flask-Cors/ -.. |Supported Python versions| image:: https://img.shields.io/pypi/pyversions/Flask-Cors.svg - :target: https://img.shields.io/pypi/pyversions/Flask-Cors.svg -.. 
|License| image:: http://img.shields.io/:license-mit-blue.svg - :target: https://pypi.python.org/pypi/Flask-Cors/ diff --git a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/RECORD b/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/RECORD deleted file mode 100644 index c78373c..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/RECORD +++ /dev/null @@ -1,16 +0,0 @@ -flask_cors-6.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask_cors-6.0.1.dist-info/METADATA,sha256=vdM_HWOTtWgELoF6BIMIUN4R-VH0ACm8zKIxqyFfuUk,5303 -flask_cors-6.0.1.dist-info/RECORD,, -flask_cors-6.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask_cors-6.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -flask_cors-6.0.1.dist-info/top_level.txt,sha256=aWye_0QNZPp_QtPF4ZluLHqnyVLT9CPJsfiGhwqkWuo,11 -flask_cors/__init__.py,sha256=Hnwpy4nxEz4tlcB3enWNXUBctcjgxRnUqa5B5zDabQ4,522 -flask_cors/__pycache__/__init__.cpython-312.pyc,, -flask_cors/__pycache__/core.cpython-312.pyc,, -flask_cors/__pycache__/decorator.cpython-312.pyc,, -flask_cors/__pycache__/extension.cpython-312.pyc,, -flask_cors/__pycache__/version.cpython-312.pyc,, -flask_cors/core.py,sha256=S8Wv7dz9e0FcR386TkBT-T8_VRsjFQGhNiR0jIpUYiY,14416 -flask_cors/decorator.py,sha256=YnPIDyWAfz60t2yxLgoOZLzgWWjZEvxQAPTTV4qfsss,4706 -flask_cors/extension.py,sha256=ONk6eNJiYT7HPEtmmySG8KgjzvAs9xtcJHNYvWLAS9s,8268 -flask_cors/version.py,sha256=unmO3xtN2S1e9meUbIjG3AZVyKxUe4HSZNXDOcpZRJg,22 diff --git a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/REQUESTED b/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/top_level.txt b/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/top_level.txt deleted file mode 100644 index 27af988..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors-6.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_cors diff --git a/.venv/lib/python3.12/site-packages/flask_cors/__init__.py b/.venv/lib/python3.12/site-packages/flask_cors/__init__.py deleted file mode 100644 index 732d401..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from .decorator import cross_origin -from .extension import CORS -from .version import __version__ - -__all__ = ["CORS", "__version__", "cross_origin"] - -# Set default logging handler to avoid "No handler found" warnings. -import logging -from logging import NullHandler - -# Set initial level to WARN. Users must manually enable logging for -# flask_cors to see our logging. 
-rootlogger = logging.getLogger(__name__) -rootlogger.addHandler(NullHandler()) - -if rootlogger.level == logging.NOTSET: - rootlogger.setLevel(logging.WARN) diff --git a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d2e05d5..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc deleted file mode 100644 index 7d6f6a5..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc deleted file mode 100644 index 0ca00db..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/decorator.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc deleted file mode 100644 index 39fa93a..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/extension.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc b/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc deleted file mode 100644 index 3209d1b..0000000 Binary files a/.venv/lib/python3.12/site-packages/flask_cors/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/flask_cors/core.py b/.venv/lib/python3.12/site-packages/flask_cors/core.py deleted file mode 100644 index 8df343c..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors/core.py +++ /dev/null @@ -1,399 +0,0 @@ -import logging -import re -from collections.abc import Iterable -from datetime import timedelta - -from flask import current_app, request -from werkzeug.datastructures import Headers, MultiDict - -LOG = logging.getLogger(__name__) - -# Response Headers -ACL_ORIGIN = "Access-Control-Allow-Origin" -ACL_METHODS = "Access-Control-Allow-Methods" -ACL_ALLOW_HEADERS = "Access-Control-Allow-Headers" -ACL_EXPOSE_HEADERS = "Access-Control-Expose-Headers" -ACL_CREDENTIALS = "Access-Control-Allow-Credentials" -ACL_MAX_AGE = "Access-Control-Max-Age" -ACL_RESPONSE_PRIVATE_NETWORK = "Access-Control-Allow-Private-Network" - -# Request Header -ACL_REQUEST_METHOD = "Access-Control-Request-Method" -ACL_REQUEST_HEADERS = "Access-Control-Request-Headers" -ACL_REQUEST_HEADER_PRIVATE_NETWORK = "Access-Control-Request-Private-Network" - -ALL_METHODS = ["GET", "HEAD", "POST", "OPTIONS", "PUT", "PATCH", "DELETE"] -CONFIG_OPTIONS = [ - "CORS_ORIGINS", - "CORS_METHODS", - "CORS_ALLOW_HEADERS", - "CORS_EXPOSE_HEADERS", - "CORS_SUPPORTS_CREDENTIALS", - "CORS_MAX_AGE", - "CORS_SEND_WILDCARD", - "CORS_AUTOMATIC_OPTIONS", - "CORS_VARY_HEADER", - "CORS_RESOURCES", - "CORS_INTERCEPT_EXCEPTIONS", - "CORS_ALWAYS_SEND", - "CORS_ALLOW_PRIVATE_NETWORK", -] -# Attribute added to request object by decorator to indicate that CORS -# was evaluated, in case the decorator and extension are both applied -# to a view. 
-FLASK_CORS_EVALUATED = "_FLASK_CORS_EVALUATED" - -# Strange, but this gets the type of a compiled regex, which is otherwise not -# exposed in a public API. -RegexObject = type(re.compile("")) -DEFAULT_OPTIONS = dict( - origins="*", - methods=ALL_METHODS, - allow_headers="*", - expose_headers=None, - supports_credentials=False, - max_age=None, - send_wildcard=False, - automatic_options=True, - vary_header=True, - resources=r"/*", - intercept_exceptions=True, - always_send=True, - allow_private_network=False, -) - - -def parse_resources(resources): - if isinstance(resources, dict): - # To make the API more consistent with the decorator, allow a - # resource of '*', which is not actually a valid regexp. - resources = [(re_fix(k), v) for k, v in resources.items()] - - # Sort patterns with static (literal) paths first, then by regex specificity - def sort_key(pair): - pattern, _ = pair - if isinstance(pattern, RegexObject): - return (1, 0, -pattern.pattern.count("/"), -len(pattern.pattern)) - elif probably_regex(pattern): - return (1, 1, -pattern.count("/"), -len(pattern)) - else: - return (0, 0, -pattern.count("/"), -len(pattern)) - - return sorted(resources, key=sort_key) - - elif isinstance(resources, str): - return [(re_fix(resources), {})] - - elif isinstance(resources, Iterable): - return [(re_fix(r), {}) for r in resources] - - # Type of compiled regex is not part of the public API. Test for this - # at runtime. - elif isinstance(resources, RegexObject): - return [(re_fix(resources), {})] - - else: - raise ValueError("Unexpected value for resources argument.") - - -def get_regexp_pattern(regexp): - """ - Helper that returns regexp pattern from given value. - - :param regexp: regular expression to stringify - :type regexp: _sre.SRE_Pattern or str - :returns: string representation of given regexp pattern - :rtype: str - """ - try: - return regexp.pattern - except AttributeError: - return str(regexp) - - -def get_cors_origins(options, request_origin): - origins = options.get("origins") - wildcard = r".*" in origins - - # If the Origin header is not present terminate this set of steps. - # The request is outside the scope of this specification.-- W3Spec - if request_origin: - LOG.debug("CORS request received with 'Origin' %s", request_origin) - - # If the allowed origins is an asterisk or 'wildcard', always match - if wildcard and options.get("send_wildcard"): - LOG.debug("Allowed origins are set to '*'. Sending wildcard CORS header.") - return ["*"] - # If the value of the Origin header is a case-insensitive match - # for any of the values in list of origins. - # NOTE: Per RFC 1035 and RFC 4343 schemes and hostnames are case insensitive. - elif try_match_any_pattern(request_origin, origins, caseSensitive=False): - LOG.debug( - "The request's Origin header matches. Sending CORS headers.", - ) - # Add a single Access-Control-Allow-Origin header, with either - # the value of the Origin header or the string "*" as value. - # -- W3Spec - return [request_origin] - else: - LOG.debug("The request's Origin header does not match any of allowed origins.") - return None - - elif options.get("always_send"): - if wildcard: - # If wildcard is in the origins, even if 'send_wildcard' is False, - # simply send the wildcard. Unless supports_credentials is True, - # since that is forbidden by the spec.. - # It is the most-likely to be correct thing to do (the only other - # option is to return nothing, which almost certainly not what - # the developer wants if the '*' origin was specified. 
- if options.get("supports_credentials"): - return None - else: - return ["*"] - else: - # Return all origins that are not regexes. - return sorted([o for o in origins if not probably_regex(o)]) - - # Terminate these steps, return the original request untouched. - else: - LOG.debug( - "The request did not contain an 'Origin' header. This means the browser or client did not request CORS, ensure the Origin Header is set." - ) - return None - - -def get_allow_headers(options, acl_request_headers): - if acl_request_headers: - request_headers = [h.strip() for h in acl_request_headers.split(",")] - - # any header that matches in the allow_headers - matching_headers = filter(lambda h: try_match_any_pattern(h, options.get("allow_headers"), caseSensitive=False), request_headers) - - return ", ".join(sorted(matching_headers)) - - return None - - -def get_cors_headers(options, request_headers, request_method): - origins_to_set = get_cors_origins(options, request_headers.get("Origin")) - headers = MultiDict() - - if not origins_to_set: # CORS is not enabled for this route - return headers - - for origin in origins_to_set: - headers.add(ACL_ORIGIN, origin) - - headers[ACL_EXPOSE_HEADERS] = options.get("expose_headers") - - if options.get("supports_credentials"): - headers[ACL_CREDENTIALS] = "true" # case sensitive - - if ( - ACL_REQUEST_HEADER_PRIVATE_NETWORK in request_headers - and request_headers.get(ACL_REQUEST_HEADER_PRIVATE_NETWORK) == "true" - ): - allow_private_network = "true" if options.get("allow_private_network") else "false" - headers[ACL_RESPONSE_PRIVATE_NETWORK] = allow_private_network - - # This is a preflight request - # http://www.w3.org/TR/cors/#resource-preflight-requests - if request_method == "OPTIONS": - acl_request_method = request_headers.get(ACL_REQUEST_METHOD, "").upper() - - # If there is no Access-Control-Request-Method header or if parsing - # failed, do not set any additional headers - if acl_request_method and acl_request_method in options.get("methods"): - # If method is not a case-sensitive match for any of the values in - # list of methods do not set any additional headers and terminate - # this set of steps. - headers[ACL_ALLOW_HEADERS] = get_allow_headers(options, request_headers.get(ACL_REQUEST_HEADERS)) - headers[ACL_MAX_AGE] = options.get("max_age") - headers[ACL_METHODS] = options.get("methods") - else: - LOG.info( - "The request's Access-Control-Request-Method header does not match allowed methods. CORS headers will not be applied." - ) - - # http://www.w3.org/TR/cors/#resource-implementation - if options.get("vary_header"): - # Only set header if the origin returned will vary dynamically, - # i.e. if we are not returning an asterisk, and there are multiple - # origins that can be matched. - if headers[ACL_ORIGIN] == "*": - pass - elif ( - len(options.get("origins")) > 1 - or len(origins_to_set) > 1 - or any(map(probably_regex, options.get("origins"))) - ): - headers.add("Vary", "Origin") - - return MultiDict((k, v) for k, v in headers.items() if v) - - -def set_cors_headers(resp, options): - """ - Performs the actual evaluation of Flask-CORS options and actually - modifies the response object. 
- - This function is used both in the decorator and the after_request - callback - """ - - # If CORS has already been evaluated via the decorator, skip - if hasattr(resp, FLASK_CORS_EVALUATED): - LOG.debug("CORS have been already evaluated, skipping") - return resp - - # Some libraries, like OAuthlib, set resp.headers to non Multidict - # objects (Werkzeug Headers work as well). This is a problem because - # headers allow repeated values. - if not isinstance(resp.headers, Headers) and not isinstance(resp.headers, MultiDict): - resp.headers = MultiDict(resp.headers) - - headers_to_set = get_cors_headers(options, request.headers, request.method) - - LOG.debug("Settings CORS headers: %s", str(headers_to_set)) - - for k, v in headers_to_set.items(): - resp.headers.add(k, v) - - return resp - - -def probably_regex(maybe_regex): - if isinstance(maybe_regex, RegexObject): - return True - else: - common_regex_chars = ["*", "\\", "]", "?", "$", "^", "[", "]", "(", ")"] - # Use common characters used in regular expressions as a proxy - # for if this string is in fact a regex. - return any(c in maybe_regex for c in common_regex_chars) - - -def re_fix(reg): - """ - Replace the invalid regex r'*' with the valid, wildcard regex r'/.*' to - enable the CORS app extension to have a more user friendly api. - """ - return r".*" if reg == r"*" else reg - - -def try_match_any_pattern(inst, patterns, caseSensitive=True): - return any(try_match_pattern(inst, pattern, caseSensitive) for pattern in patterns) - -def try_match_pattern(value, pattern, caseSensitive=True): - """ - Safely attempts to match a pattern or string to a value. This - function can be used to match request origins, headers, or paths. - The value of caseSensitive should be set in accordance to the - data being compared e.g. origins and headers are case insensitive - whereas paths are case-sensitive - """ - if isinstance(pattern, RegexObject): - return re.match(pattern, value) - if probably_regex(pattern): - flags = 0 if caseSensitive else re.IGNORECASE - try: - return re.match(pattern, value, flags=flags) - except re.error: - return False - try: - v = str(value) - p = str(pattern) - return v == p if caseSensitive else v.casefold() == p.casefold() - except Exception: - return value == pattern - -def get_cors_options(appInstance, *dicts): - """ - Compute CORS options for an application by combining the DEFAULT_OPTIONS, - the app's configuration-specified options and any dictionaries passed. The - last specified option wins. - """ - options = DEFAULT_OPTIONS.copy() - options.update(get_app_kwarg_dict(appInstance)) - if dicts: - for d in dicts: - options.update(d) - - return serialize_options(options) - - -def get_app_kwarg_dict(appInstance=None): - """Returns the dictionary of CORS specific app configurations.""" - app = appInstance or current_app - - # In order to support blueprints which do not have a config attribute - app_config = getattr(app, "config", {}) - - return {k.lower().replace("cors_", ""): app_config.get(k) for k in CONFIG_OPTIONS if app_config.get(k) is not None} - - -def flexible_str(obj): - """ - A more flexible str function which intelligently handles stringifying - strings, lists and other iterables. The results are lexographically sorted - to ensure generated responses are consistent when iterables such as Set - are used. 
- """ - if obj is None: - return None - elif not isinstance(obj, str) and isinstance(obj, Iterable): - return ", ".join(str(item) for item in sorted(obj)) - else: - return str(obj) - - -def serialize_option(options_dict, key, upper=False): - if key in options_dict: - value = flexible_str(options_dict[key]) - options_dict[key] = value.upper() if upper else value - - -def ensure_iterable(inst): - """ - Wraps scalars or string types as a list, or returns the iterable instance. - """ - if isinstance(inst, str) or not isinstance(inst, Iterable): - return [inst] - else: - return inst - - -def sanitize_regex_param(param): - return [re_fix(x) for x in ensure_iterable(param)] - - -def serialize_options(opts): - """ - A helper method to serialize and processes the options dictionary. - """ - options = (opts or {}).copy() - - for key in opts.keys(): - if key not in DEFAULT_OPTIONS: - LOG.warning("Unknown option passed to Flask-CORS: %s", key) - - # Ensure origins is a list of allowed origins with at least one entry. - options["origins"] = sanitize_regex_param(options.get("origins")) - options["allow_headers"] = sanitize_regex_param(options.get("allow_headers")) - - # This is expressly forbidden by the spec. Raise a value error so people - # don't get burned in production. - if r".*" in options["origins"] and options["supports_credentials"] and options["send_wildcard"]: - raise ValueError( - "Cannot use supports_credentials in conjunction with" - "an origin string of '*'. See: " - "http://www.w3.org/TR/cors/#resource-requests" - ) - - serialize_option(options, "expose_headers") - serialize_option(options, "methods", upper=True) - - if isinstance(options.get("max_age"), timedelta): - options["max_age"] = str(int(options["max_age"].total_seconds())) - - return options diff --git a/.venv/lib/python3.12/site-packages/flask_cors/decorator.py b/.venv/lib/python3.12/site-packages/flask_cors/decorator.py deleted file mode 100644 index f8915b6..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors/decorator.py +++ /dev/null @@ -1,129 +0,0 @@ -import logging -from functools import update_wrapper - -from flask import current_app, make_response, request - -from .core import FLASK_CORS_EVALUATED, get_cors_options, set_cors_headers - -LOG = logging.getLogger(__name__) - - -def cross_origin(*args, **kwargs): - """ - This function is the decorator which is used to wrap a Flask route with. - In the simplest case, simply use the default parameters to allow all - origins in what is the most permissive configuration. If this method - modifies state or performs authentication which may be brute-forced, you - should add some degree of protection, such as Cross Site Request Forgery - protection. - - :param origins: - The origin, or list of origins to allow requests from. - The origin(s) may be regular expressions, case-sensitive strings, - or else an asterisk - - Default : '*' - :type origins: list, string or regex - - :param methods: - The method or list of methods which the allowed origins are allowed to - access for non-simple requests. - - Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE] - :type methods: list or string - - :param expose_headers: - The header or list which are safe to expose to the API of a CORS API - specification. - - Default : None - :type expose_headers: list or string - - :param allow_headers: - The header or list of header field names which can be used when this - resource is accessed by allowed origins. 
The header(s) may be regular - expressions, case-sensitive strings, or else an asterisk. - - Default : '*', allow all headers - :type allow_headers: list, string or regex - - :param supports_credentials: - Allows users to make authenticated requests. If true, injects the - `Access-Control-Allow-Credentials` header in responses. This allows - cookies and credentials to be submitted across domains. - - :note: This option cannot be used in conjunction with a '*' origin - - Default : False - :type supports_credentials: bool - - :param max_age: - The maximum time for which this CORS request maybe cached. This value - is set as the `Access-Control-Max-Age` header. - - Default : None - :type max_age: timedelta, integer, string or None - - :param send_wildcard: If True, and the origins parameter is `*`, a wildcard - `Access-Control-Allow-Origin` header is sent, rather than the - request's `Origin` header. - - Default : False - :type send_wildcard: bool - - :param vary_header: - If True, the header Vary: Origin will be returned as per the W3 - implementation guidelines. - - Setting this header when the `Access-Control-Allow-Origin` is - dynamically generated (e.g. when there is more than one allowed - origin, and an Origin than '*' is returned) informs CDNs and other - caches that the CORS headers are dynamic, and cannot be cached. - - If False, the Vary header will never be injected or altered. - - Default : True - :type vary_header: bool - - :param automatic_options: - Only applies to the `cross_origin` decorator. If True, Flask-CORS will - override Flask's default OPTIONS handling to return CORS headers for - OPTIONS requests. - - Default : True - :type automatic_options: bool - - """ - _options = kwargs - - def decorator(f): - LOG.debug("Enabling %s for cross_origin using options:%s", f, _options) - - # If True, intercept OPTIONS requests by modifying the view function, - # replicating Flask's default behavior, and wrapping the response with - # CORS headers. - # - # If f.provide_automatic_options is unset or True, Flask's route - # decorator (which is actually wraps the function object we return) - # intercepts OPTIONS handling, and requests will not have CORS headers - if _options.get("automatic_options", True): - f.required_methods = getattr(f, "required_methods", set()) - f.required_methods.add("OPTIONS") - f.provide_automatic_options = False - - def wrapped_function(*args, **kwargs): - # Handle setting of Flask-Cors parameters - options = get_cors_options(current_app, _options) - - if options.get("automatic_options") and request.method == "OPTIONS": - resp = current_app.make_default_options_response() - else: - resp = make_response(f(*args, **kwargs)) - - set_cors_headers(resp, options) - setattr(resp, FLASK_CORS_EVALUATED, True) - return resp - - return update_wrapper(wrapped_function, f) - - return decorator diff --git a/.venv/lib/python3.12/site-packages/flask_cors/extension.py b/.venv/lib/python3.12/site-packages/flask_cors/extension.py deleted file mode 100644 index 434f65e..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors/extension.py +++ /dev/null @@ -1,206 +0,0 @@ -import logging -from urllib.parse import unquote - -from flask import request - -from .core import ACL_ORIGIN, get_cors_options, get_regexp_pattern, parse_resources, set_cors_headers, try_match_pattern - -LOG = logging.getLogger(__name__) - - -class CORS: - """ - Initializes Cross Origin Resource sharing for the application. 
The - arguments are identical to `cross_origin`, with the addition of a - `resources` parameter. The resources parameter defines a series of regular - expressions for resource paths to match and optionally, the associated - options to be applied to the particular resource. These options are - identical to the arguments to `cross_origin`. - - The settings for CORS are determined in the following order - - 1. Resource level settings (e.g when passed as a dictionary) - 2. Keyword argument settings - 3. App level configuration settings (e.g. CORS_*) - 4. Default settings - - Note: as it is possible for multiple regular expressions to match a - resource path, the regular expressions are first sorted by length, - from longest to shortest, in order to attempt to match the most - specific regular expression. This allows the definition of a - number of specific resource options, with a wildcard fallback - for all other resources. - - :param resources: - The series of regular expression and (optionally) associated CORS - options to be applied to the given resource path. - - If the argument is a dictionary, it's keys must be regular expressions, - and the values must be a dictionary of kwargs, identical to the kwargs - of this function. - - If the argument is a list, it is expected to be a list of regular - expressions, for which the app-wide configured options are applied. - - If the argument is a string, it is expected to be a regular expression - for which the app-wide configured options are applied. - - Default : Match all and apply app-level configuration - - :type resources: dict, iterable or string - - :param origins: - The origin, or list of origins to allow requests from. - The origin(s) may be regular expressions, case-sensitive strings, - or else an asterisk. - - .. note:: - - origins must include the schema and the port (if not port 80), - e.g., - `CORS(app, origins=["http://localhost:8000", "https://example.com"])`. - - Default : '*' - :type origins: list, string or regex - - :param methods: - The method or list of methods which the allowed origins are allowed to - access for non-simple requests. - - Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE] - :type methods: list or string - - :param expose_headers: - The header or list which are safe to expose to the API of a CORS API - specification. - - Default : None - :type expose_headers: list or string - - :param allow_headers: - The header or list of header field names which can be used when this - resource is accessed by allowed origins. The header(s) may be regular - expressions, case-sensitive strings, or else an asterisk. - - Default : '*', allow all headers - :type allow_headers: list, string or regex - - :param supports_credentials: - Allows users to make authenticated requests. If true, injects the - `Access-Control-Allow-Credentials` header in responses. This allows - cookies and credentials to be submitted across domains. - - :note: This option cannot be used in conjunction with a '*' origin - - Default : False - :type supports_credentials: bool - - :param max_age: - The maximum time for which this CORS request maybe cached. This value - is set as the `Access-Control-Max-Age` header. - - Default : None - :type max_age: timedelta, integer, string or None - - :param send_wildcard: If True, and the origins parameter is `*`, a wildcard - `Access-Control-Allow-Origin` header is sent, rather than the - request's `Origin` header. 
- - Default : False - :type send_wildcard: bool - - :param vary_header: - If True, the header Vary: Origin will be returned as per the W3 - implementation guidelines. - - Setting this header when the `Access-Control-Allow-Origin` is - dynamically generated (e.g. when there is more than one allowed - origin, and an Origin than '*' is returned) informs CDNs and other - caches that the CORS headers are dynamic, and cannot be cached. - - If False, the Vary header will never be injected or altered. - - Default : True - :type vary_header: bool - - :param allow_private_network: - If True, the response header `Access-Control-Allow-Private-Network` - will be set with the value 'true' whenever the request header - `Access-Control-Request-Private-Network` has a value 'true'. - - If False, the response header `Access-Control-Allow-Private-Network` - will be set with the value 'false' whenever the request header - `Access-Control-Request-Private-Network` has a value of 'true'. - - If the request header `Access-Control-Request-Private-Network` is - not present or has a value other than 'true', the response header - `Access-Control-Allow-Private-Network` will not be set. - - Default : True - :type allow_private_network: bool - """ - - def __init__(self, app=None, **kwargs): - self._options = kwargs - if app is not None: - self.init_app(app, **kwargs) - - def init_app(self, app, **kwargs): - # The resources and options may be specified in the App Config, the CORS constructor - # or the kwargs to the call to init_app. - options = get_cors_options(app, self._options, kwargs) - - # Flatten our resources into a list of the form - # (pattern_or_regexp, dictionary_of_options) - resources = parse_resources(options.get("resources")) - - # Compute the options for each resource by combining the options from - # the app's configuration, the constructor, the kwargs to init_app, and - # finally the options specified in the resources dictionary. - resources = [(pattern, get_cors_options(app, options, opts)) for (pattern, opts) in resources] - - # Create a human-readable form of these resources by converting the compiled - # regular expressions into strings. - resources_human = {get_regexp_pattern(pattern): opts for (pattern, opts) in resources} - LOG.debug("Configuring CORS with resources: %s", resources_human) - - cors_after_request = make_after_request_function(resources) - app.after_request(cors_after_request) - - # Wrap exception handlers with cross_origin - # These error handlers will still respect the behavior of the route - if options.get("intercept_exceptions", True): - - def _after_request_decorator(f): - def wrapped_function(*args, **kwargs): - return cors_after_request(app.make_response(f(*args, **kwargs))) - - return wrapped_function - - if hasattr(app, "handle_exception"): - app.handle_exception = _after_request_decorator(app.handle_exception) - app.handle_user_exception = _after_request_decorator(app.handle_user_exception) - - -def make_after_request_function(resources): - def cors_after_request(resp): - # If CORS headers are set in a view decorator, pass - if resp.headers is not None and resp.headers.get(ACL_ORIGIN): - LOG.debug("CORS have been already evaluated, skipping") - return resp - normalized_path = unquote(request.path) - for res_regex, res_options in resources: - if try_match_pattern(normalized_path, res_regex, caseSensitive=True): - LOG.debug( - "Request to '%r' matches CORS resource '%s'. 
Using options: %s", - request.path, - get_regexp_pattern(res_regex), - res_options, - ) - set_cors_headers(resp, res_options) - break - else: - LOG.debug("No CORS rule matches") - return resp - - return cors_after_request diff --git a/.venv/lib/python3.12/site-packages/flask_cors/version.py b/.venv/lib/python3.12/site-packages/flask_cors/version.py deleted file mode 100644 index 79a961b..0000000 --- a/.venv/lib/python3.12/site-packages/flask_cors/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "6.0.1" diff --git a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt b/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt deleted file mode 100644 index 7b190ca..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2011 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/METADATA b/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/METADATA deleted file mode 100644 index ddf5464..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: itsdangerous -Version: 2.2.0 -Summary: Safely pass data to untrusted environments and back. 
-Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/itsdangerous/ - -# ItsDangerous - -... so better sign this - -Various helpers to pass data to untrusted environments and to get it -back safe and sound. Data is cryptographically signed to ensure that a -token has not been tampered with. - -It's possible to customize how data is serialized. Data is compressed as -needed. A timestamp can be added and verified automatically while -loading a token. - - -## A Simple Example - -Here's how you could generate a token for transmitting a user's id and -name between web requests. - -```python -from itsdangerous import URLSafeSerializer -auth_s = URLSafeSerializer("secret key", "auth") -token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) - -print(token) -# eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg - -data = auth_s.loads(token) -print(data["name"]) -# itsdangerous -``` - - -## Donate - -The Pallets organization develops and supports ItsDangerous and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -[please donate today][]. - -[please donate today]: https://palletsprojects.com/donate - diff --git a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/RECORD b/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/RECORD deleted file mode 100644 index 245f43e..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -itsdangerous-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -itsdangerous-2.2.0.dist-info/LICENSE.txt,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 -itsdangerous-2.2.0.dist-info/METADATA,sha256=0rk0-1ZwihuU5DnwJVwPWoEI4yWOyCexih3JyZHblhE,1924 -itsdangerous-2.2.0.dist-info/RECORD,, -itsdangerous-2.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -itsdangerous/__init__.py,sha256=4SK75sCe29xbRgQE1ZQtMHnKUuZYAf3bSpZOrff1IAY,1427 -itsdangerous/__pycache__/__init__.cpython-312.pyc,, -itsdangerous/__pycache__/_json.cpython-312.pyc,, -itsdangerous/__pycache__/encoding.cpython-312.pyc,, -itsdangerous/__pycache__/exc.cpython-312.pyc,, -itsdangerous/__pycache__/serializer.cpython-312.pyc,, -itsdangerous/__pycache__/signer.cpython-312.pyc,, -itsdangerous/__pycache__/timed.cpython-312.pyc,, -itsdangerous/__pycache__/url_safe.cpython-312.pyc,, -itsdangerous/_json.py,sha256=wPQGmge2yZ9328EHKF6gadGeyGYCJQKxtU-iLKE6UnA,473 -itsdangerous/encoding.py,sha256=wwTz5q_3zLcaAdunk6_vSoStwGqYWe307Zl_U87aRFM,1409 -itsdangerous/exc.py,sha256=Rr3exo0MRFEcPZltwecyK16VV1bE2K9_F1-d-ljcUn4,3201 -itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous/serializer.py,sha256=PmdwADLqkSyQLZ0jOKAgDsAW4k_H0TlA71Ei3z0C5aI,15601 -itsdangerous/signer.py,sha256=YO0CV7NBvHA6j549REHJFUjUojw2pHqwcUpQnU7yNYQ,9647 
-itsdangerous/timed.py,sha256=6RvDMqNumGMxf0-HlpaZdN9PUQQmRvrQGplKhxuivUs,8083 -itsdangerous/url_safe.py,sha256=az4e5fXi_vs-YbWj8YZwn4wiVKfeD--GEKRT5Ueu4P4,2505 diff --git a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous-2.2.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__init__.py b/.venv/lib/python3.12/site-packages/itsdangerous/__init__.py deleted file mode 100644 index ea55256..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .encoding import base64_decode as base64_decode -from .encoding import base64_encode as base64_encode -from .encoding import want_bytes as want_bytes -from .exc import BadData as BadData -from .exc import BadHeader as BadHeader -from .exc import BadPayload as BadPayload -from .exc import BadSignature as BadSignature -from .exc import BadTimeSignature as BadTimeSignature -from .exc import SignatureExpired as SignatureExpired -from .serializer import Serializer as Serializer -from .signer import HMACAlgorithm as HMACAlgorithm -from .signer import NoneAlgorithm as NoneAlgorithm -from .signer import Signer as Signer -from .timed import TimedSerializer as TimedSerializer -from .timed import TimestampSigner as TimestampSigner -from .url_safe import URLSafeSerializer as URLSafeSerializer -from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " ItsDangerous 2.3. 
Use feature detection or" - " 'importlib.metadata.version(\"itsdangerous\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("itsdangerous") - - raise AttributeError(name) diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 3c7b5c1..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc deleted file mode 100644 index 2edff59..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/_json.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc deleted file mode 100644 index 570890c..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/encoding.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc deleted file mode 100644 index c9ade9e..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/exc.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc deleted file mode 100644 index 4c0b66c..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/serializer.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc deleted file mode 100644 index 9c5f95f..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/signer.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc deleted file mode 100644 index 94a1142..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/timed.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc b/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc deleted file mode 100644 index 0ba369b..0000000 Binary files a/.venv/lib/python3.12/site-packages/itsdangerous/__pycache__/url_safe.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/_json.py b/.venv/lib/python3.12/site-packages/itsdangerous/_json.py deleted file mode 100644 index fc23fea..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/_json.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - - -class _CompactJSON: - """Wrapper around json module that strips whitespace.""" - - @staticmethod - def loads(payload: str | bytes) -> t.Any: - return _json.loads(payload) - - 
@staticmethod - def dumps(obj: t.Any, **kwargs: t.Any) -> str: - kwargs.setdefault("ensure_ascii", False) - kwargs.setdefault("separators", (",", ":")) - return _json.dumps(obj, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/encoding.py b/.venv/lib/python3.12/site-packages/itsdangerous/encoding.py deleted file mode 100644 index f5ca80f..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/encoding.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -import base64 -import string -import struct -import typing as t - -from .exc import BadData - - -def want_bytes( - s: str | bytes, encoding: str = "utf-8", errors: str = "strict" -) -> bytes: - if isinstance(s, str): - s = s.encode(encoding, errors) - - return s - - -def base64_encode(string: str | bytes) -> bytes: - """Base64 encode a string of bytes or text. The resulting bytes are - safe to use in URLs. - """ - string = want_bytes(string) - return base64.urlsafe_b64encode(string).rstrip(b"=") - - -def base64_decode(string: str | bytes) -> bytes: - """Base64 decode a URL-safe string of bytes or text. The result is - bytes. - """ - string = want_bytes(string, encoding="ascii", errors="ignore") - string += b"=" * (-len(string) % 4) - - try: - return base64.urlsafe_b64decode(string) - except (TypeError, ValueError) as e: - raise BadData("Invalid base64-encoded data") from e - - -# The alphabet used by base64.urlsafe_* -_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") - -_int64_struct = struct.Struct(">Q") -_int_to_bytes = _int64_struct.pack -_bytes_to_int = t.cast("t.Callable[[bytes], tuple[int]]", _int64_struct.unpack) - - -def int_to_bytes(num: int) -> bytes: - return _int_to_bytes(num).lstrip(b"\x00") - - -def bytes_to_int(bytestr: bytes) -> int: - return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/exc.py b/.venv/lib/python3.12/site-packages/itsdangerous/exc.py deleted file mode 100644 index a75adcd..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/exc.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import annotations - -import typing as t -from datetime import datetime - - -class BadData(Exception): - """Raised if bad data of any sort was encountered. This is the base - for all exceptions that ItsDangerous defines. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str): - super().__init__(message) - self.message = message - - def __str__(self) -> str: - return self.message - - -class BadSignature(BadData): - """Raised if a signature does not match.""" - - def __init__(self, message: str, payload: t.Any | None = None): - super().__init__(message) - - #: The payload that failed the signature test. In some - #: situations you might still want to inspect this, even if - #: you know it was tampered with. - #: - #: .. versionadded:: 0.14 - self.payload: t.Any | None = payload - - -class BadTimeSignature(BadSignature): - """Raised if a time-based signature is invalid. This is a subclass - of :class:`BadSignature`. - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - date_signed: datetime | None = None, - ): - super().__init__(message, payload) - - #: If the signature expired this exposes the date of when the - #: signature was created. This can be helpful in order to - #: tell the user how long a link has been gone stale. - #: - #: .. versionchanged:: 2.0 - #: The datetime value is timezone-aware rather than naive. - #: - #: .. 
versionadded:: 0.14 - self.date_signed = date_signed - - -class SignatureExpired(BadTimeSignature): - """Raised if a signature timestamp is older than ``max_age``. This - is a subclass of :exc:`BadTimeSignature`. - """ - - -class BadHeader(BadSignature): - """Raised if a signed header is invalid in some form. This only - happens for serializers that have a header that goes with the - signature. - - .. versionadded:: 0.24 - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - header: t.Any | None = None, - original_error: Exception | None = None, - ): - super().__init__(message, payload) - - #: If the header is actually available but just malformed it - #: might be stored here. - self.header: t.Any | None = header - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error - - -class BadPayload(BadData): - """Raised if a payload is invalid. This could happen if the payload - is loaded despite an invalid signature, or if there is a mismatch - between the serializer and deserializer. The original exception - that occurred during loading is stored on as :attr:`original_error`. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str, original_error: Exception | None = None): - super().__init__(message) - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/py.typed b/.venv/lib/python3.12/site-packages/itsdangerous/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/serializer.py b/.venv/lib/python3.12/site-packages/itsdangerous/serializer.py deleted file mode 100644 index 5ddf387..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/serializer.py +++ /dev/null @@ -1,406 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import json -import typing as t - -from .encoding import want_bytes -from .exc import BadPayload -from .exc import BadSignature -from .signer import _make_keys_list -from .signer import Signer - -if t.TYPE_CHECKING: - import typing_extensions as te - - # This should be either be str or bytes. To avoid having to specify the - # bound type, it falls back to a union if structural matching fails. - _TSerialized = te.TypeVar( - "_TSerialized", bound=t.Union[str, bytes], default=t.Union[str, bytes] - ) -else: - # Still available at runtime on Python < 3.13, but without the default. - _TSerialized = t.TypeVar("_TSerialized", bound=t.Union[str, bytes]) - - -class _PDataSerializer(t.Protocol[_TSerialized]): - def loads(self, payload: _TSerialized, /) -> t.Any: ... - # A signature with additional arguments is not handled correctly by type - # checkers right now, so an overload is used below for serializers that - # don't match this strict protocol. - def dumps(self, obj: t.Any, /) -> _TSerialized: ... - - -# Use TypeIs once it's available in typing_extensions or 3.13. -def is_text_serializer( - serializer: _PDataSerializer[t.Any], -) -> te.TypeGuard[_PDataSerializer[str]]: - """Checks whether a serializer generates text or binary.""" - return isinstance(serializer.dumps({}), str) - - -class Serializer(t.Generic[_TSerialized]): - """A serializer wraps a :class:`~itsdangerous.signer.Signer` to - enable serializing and securely signing data other than bytes. 
It - can unsign to verify that the data hasn't been changed. - - The serializer provides :meth:`dumps` and :meth:`loads`, similar to - :mod:`json`, and by default uses :mod:`json` internally to serialize - the data to bytes. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param serializer: An object that provides ``dumps`` and ``loads`` - methods for serializing data to a string. Defaults to - :attr:`default_serializer`, which defaults to :mod:`json`. - :param serializer_kwargs: Keyword arguments to pass when calling - ``serializer.dumps``. - :param signer: A ``Signer`` class to instantiate when signing data. - Defaults to :attr:`default_signer`, which defaults to - :class:`~itsdangerous.signer.Signer`. - :param signer_kwargs: Keyword arguments to pass when instantiating - the ``Signer`` class. - :param fallback_signers: List of signer parameters to try when - unsigning with the default signer fails. Each item can be a dict - of ``signer_kwargs``, a ``Signer`` class, or a tuple of - ``(signer, signer_kwargs)``. Defaults to - :attr:`default_fallback_signers`. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 2.0 - Removed the default SHA-512 fallback signer from - ``default_fallback_signers``. - - .. versionchanged:: 1.1 - Added support for ``fallback_signers`` and configured a default - SHA-512 fallback. This fallback is for users who used the yanked - 1.0.0 release which defaulted to SHA-512. - - .. versionchanged:: 0.14 - The ``signer`` and ``signer_kwargs`` parameters were added to - the constructor. - """ - - #: The default serialization module to use to serialize data to a - #: string internally. The default is :mod:`json`, but can be changed - #: to any object that provides ``dumps`` and ``loads`` methods. - default_serializer: _PDataSerializer[t.Any] = json - - #: The default ``Signer`` class to instantiate when signing data. - #: The default is :class:`itsdangerous.signer.Signer`. - default_signer: type[Signer] = Signer - - #: The default fallback signers to try when unsigning fails. - default_fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = [] - - # Serializer[str] if no data serializer is provided, or if it returns str. - @t.overload - def __init__( - self: Serializer[str], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: None | _PDataSerializer[str] = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer positional argument. 
- @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer keyword argument. - @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a positional argument. If the strict signature of - # _PDataSerializer doesn't match, fall back to a union, requiring the user - # to specify the type. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a keyword argument. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: t.Any | None = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. 
- self.secret_keys: list[bytes] = _make_keys_list(secret_key) - - if salt is not None: - salt = want_bytes(salt) - # if salt is None then the signer's default is used - - self.salt = salt - - if serializer is None: - serializer = self.default_serializer - - self.serializer: _PDataSerializer[_TSerialized] = serializer - self.is_text_serializer: bool = is_text_serializer(serializer) - - if signer is None: - signer = self.default_signer - - self.signer: type[Signer] = signer - self.signer_kwargs: dict[str, t.Any] = signer_kwargs or {} - - if fallback_signers is None: - fallback_signers = list(self.default_fallback_signers) - - self.fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = fallback_signers - self.serializer_kwargs: dict[str, t.Any] = serializer_kwargs or {} - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def load_payload( - self, payload: bytes, serializer: _PDataSerializer[t.Any] | None = None - ) -> t.Any: - """Loads the encoded object. This function raises - :class:`.BadPayload` if the payload is not valid. The - ``serializer`` parameter can be used to override the serializer - stored on the class. The encoded ``payload`` should always be - bytes. - """ - if serializer is None: - use_serializer = self.serializer - is_text = self.is_text_serializer - else: - use_serializer = serializer - is_text = is_text_serializer(serializer) - - try: - if is_text: - return use_serializer.loads(payload.decode("utf-8")) # type: ignore[arg-type] - - return use_serializer.loads(payload) # type: ignore[arg-type] - except Exception as e: - raise BadPayload( - "Could not load the payload because an exception" - " occurred on unserializing the data.", - original_error=e, - ) from e - - def dump_payload(self, obj: t.Any) -> bytes: - """Dumps the encoded object. The return value is always bytes. - If the internal serializer returns text, the value will be - encoded as UTF-8. - """ - return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) - - def make_signer(self, salt: str | bytes | None = None) -> Signer: - """Creates a new instance of the signer to be used. The default - implementation uses the :class:`.Signer` base class. - """ - if salt is None: - salt = self.salt - - return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) - - def iter_unsigners(self, salt: str | bytes | None = None) -> cabc.Iterator[Signer]: - """Iterates over all signers to be tried for unsigning. Starts - with the configured signer, then constructs each signer - specified in ``fallback_signers``. - """ - if salt is None: - salt = self.salt - - yield self.make_signer(salt) - - for fallback in self.fallback_signers: - if isinstance(fallback, dict): - kwargs = fallback - fallback = self.signer - elif isinstance(fallback, tuple): - fallback, kwargs = fallback - else: - kwargs = self.signer_kwargs - - for secret_key in self.secret_keys: - yield fallback(secret_key, salt=salt, **kwargs) - - def dumps(self, obj: t.Any, salt: str | bytes | None = None) -> _TSerialized: - """Returns a signed string serialized with the internal - serializer. The return value can be either a byte or unicode - string depending on the format of the internal serializer. 
- """ - payload = want_bytes(self.dump_payload(obj)) - rv = self.make_signer(salt).sign(payload) - - if self.is_text_serializer: - return rv.decode("utf-8") # type: ignore[return-value] - - return rv # type: ignore[return-value] - - def dump(self, obj: t.Any, f: t.IO[t.Any], salt: str | bytes | None = None) -> None: - """Like :meth:`dumps` but dumps into a file. The file handle has - to be compatible with what the internal serializer expects. - """ - f.write(self.dumps(obj, salt)) - - def loads( - self, s: str | bytes, salt: str | bytes | None = None, **kwargs: t.Any - ) -> t.Any: - """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the - signature validation fails. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - return self.load_payload(signer.unsign(s)) - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def load(self, f: t.IO[t.Any], salt: str | bytes | None = None) -> t.Any: - """Like :meth:`loads` but loads from a file.""" - return self.loads(f.read(), salt) - - def loads_unsafe( - self, s: str | bytes, salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads` but without verifying the signature. This - is potentially very dangerous to use depending on how your - serializer works. The return value is ``(signature_valid, - payload)`` instead of just the payload. The first item will be a - boolean that indicates if the signature is valid. This function - never fails. - - Use it for debugging only and if you know that your serializer - module is not exploitable (for example, do not use it with a - pickle serializer). - - .. versionadded:: 0.15 - """ - return self._loads_unsafe_impl(s, salt) - - def _loads_unsafe_impl( - self, - s: str | bytes, - salt: str | bytes | None, - load_kwargs: dict[str, t.Any] | None = None, - load_payload_kwargs: dict[str, t.Any] | None = None, - ) -> tuple[bool, t.Any]: - """Low level helper function to implement :meth:`loads_unsafe` - in serializer subclasses. - """ - if load_kwargs is None: - load_kwargs = {} - - try: - return True, self.loads(s, salt=salt, **load_kwargs) - except BadSignature as e: - if e.payload is None: - return False, None - - if load_payload_kwargs is None: - load_payload_kwargs = {} - - try: - return ( - False, - self.load_payload(e.payload, **load_payload_kwargs), - ) - except BadPayload: - return False, None - - def load_unsafe( - self, f: t.IO[t.Any], salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads_unsafe` but loads from a file. - - .. versionadded:: 0.15 - """ - return self.loads_unsafe(f.read(), salt=salt) diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/signer.py b/.venv/lib/python3.12/site-packages/itsdangerous/signer.py deleted file mode 100644 index e324dc0..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/signer.py +++ /dev/null @@ -1,266 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import hashlib -import hmac -import typing as t - -from .encoding import _base64_alphabet -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import want_bytes -from .exc import BadSignature - - -class SigningAlgorithm: - """Subclasses must implement :meth:`get_signature` to provide - signature generation functionality. 
- """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - """Returns the signature for the given key and value.""" - raise NotImplementedError() - - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: - """Verifies the given signature matches the expected - signature. - """ - return hmac.compare_digest(sig, self.get_signature(key, value)) - - -class NoneAlgorithm(SigningAlgorithm): - """Provides an algorithm that does not perform any signing and - returns an empty signature. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - return b"" - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class HMACAlgorithm(SigningAlgorithm): - """Provides signature generation using HMACs.""" - - #: The digest method to use with the MAC algorithm. This defaults to - #: SHA1, but can be changed to any other function in the hashlib - #: module. - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - def __init__(self, digest_method: t.Any = None): - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - def get_signature(self, key: bytes, value: bytes) -> bytes: - mac = hmac.new(key, msg=value, digestmod=self.digest_method) - return mac.digest() - - -def _make_keys_list( - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], -) -> list[bytes]: - if isinstance(secret_key, (str, bytes)): - return [want_bytes(secret_key)] - - return [want_bytes(s) for s in secret_key] # pyright: ignore - - -class Signer: - """A signer securely signs bytes, then unsigns them to verify that - the value hasn't been changed. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param sep: Separator between the signature and value. - :param key_derivation: How to derive the signing key from the secret - key and salt. Possible values are ``concat``, ``django-concat``, - or ``hmac``. Defaults to :attr:`default_key_derivation`, which - defaults to ``django-concat``. - :param digest_method: Hash function to use when generating the HMAC - signature. Defaults to :attr:`default_digest_method`, which - defaults to :func:`hashlib.sha1`. Note that the security of the - hash alone doesn't apply when used intermediately in HMAC. - :param algorithm: A :class:`SigningAlgorithm` instance to use - instead of building a default :class:`HMACAlgorithm` with the - ``digest_method``. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 0.18 - ``algorithm`` was added as an argument to the class constructor. - - .. versionchanged:: 0.14 - ``key_derivation`` and ``digest_method`` were added as arguments - to the class constructor. - """ - - #: The default digest method to use for the signer. 
The default is - #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or - #: compatible object. Note that the security of the hash alone - #: doesn't apply when used intermediately in HMAC. - #: - #: .. versionadded:: 0.14 - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - #: The default scheme to use to derive the signing key from the - #: secret key and salt. The default is ``django-concat``. Possible - #: values are ``concat``, ``django-concat``, and ``hmac``. - #: - #: .. versionadded:: 0.14 - default_key_derivation: str = "django-concat" - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous.Signer", - sep: str | bytes = b".", - key_derivation: str | None = None, - digest_method: t.Any | None = None, - algorithm: SigningAlgorithm | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: list[bytes] = _make_keys_list(secret_key) - self.sep: bytes = want_bytes(sep) - - if self.sep in _base64_alphabet: - raise ValueError( - "The given separator cannot be used because it may be" - " contained in the signature itself. ASCII letters," - " digits, and '-_=' must not be used." - ) - - if salt is not None: - salt = want_bytes(salt) - else: - salt = b"itsdangerous.Signer" - - self.salt = salt - - if key_derivation is None: - key_derivation = self.default_key_derivation - - self.key_derivation: str = key_derivation - - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - if algorithm is None: - algorithm = HMACAlgorithm(self.digest_method) - - self.algorithm: SigningAlgorithm = algorithm - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def derive_key(self, secret_key: str | bytes | None = None) -> bytes: - """This method is called to derive the key. The default key - derivation choices can be overridden here. Key derivation is not - intended to be used as a security method to make a complex key - out of a short password. Instead you should use large random - secret keys. - - :param secret_key: A specific secret key to derive from. - Defaults to the last item in :attr:`secret_keys`. - - .. versionchanged:: 2.0 - Added the ``secret_key`` parameter. 
- """ - if secret_key is None: - secret_key = self.secret_keys[-1] - else: - secret_key = want_bytes(secret_key) - - if self.key_derivation == "concat": - return t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) - elif self.key_derivation == "django-concat": - return t.cast( - bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() - ) - elif self.key_derivation == "hmac": - mac = hmac.new(secret_key, digestmod=self.digest_method) - mac.update(self.salt) - return mac.digest() - elif self.key_derivation == "none": - return secret_key - else: - raise TypeError("Unknown key derivation method") - - def get_signature(self, value: str | bytes) -> bytes: - """Returns the signature for the given value.""" - value = want_bytes(value) - key = self.derive_key() - sig = self.algorithm.get_signature(key, value) - return base64_encode(sig) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string.""" - value = want_bytes(value) - return value + self.sep + self.get_signature(value) - - def verify_signature(self, value: str | bytes, sig: str | bytes) -> bool: - """Verifies the signature for the given value.""" - try: - sig = base64_decode(sig) - except Exception: - return False - - value = want_bytes(value) - - for secret_key in reversed(self.secret_keys): - key = self.derive_key(secret_key) - - if self.algorithm.verify_signature(key, value, sig): - return True - - return False - - def unsign(self, signed_value: str | bytes) -> bytes: - """Unsigns the given string.""" - signed_value = want_bytes(signed_value) - - if self.sep not in signed_value: - raise BadSignature(f"No {self.sep!r} found in value") - - value, sig = signed_value.rsplit(self.sep, 1) - - if self.verify_signature(value, sig): - return value - - raise BadSignature(f"Signature {sig!r} does not match", payload=value) - - def validate(self, signed_value: str | bytes) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid. - """ - try: - self.unsign(signed_value) - return True - except BadSignature: - return False diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/timed.py b/.venv/lib/python3.12/site-packages/itsdangerous/timed.py deleted file mode 100644 index 7384375..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/timed.py +++ /dev/null @@ -1,228 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import time -import typing as t -from datetime import datetime -from datetime import timezone - -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import bytes_to_int -from .encoding import int_to_bytes -from .encoding import want_bytes -from .exc import BadSignature -from .exc import BadTimeSignature -from .exc import SignatureExpired -from .serializer import _TSerialized -from .serializer import Serializer -from .signer import Signer - - -class TimestampSigner(Signer): - """Works like the regular :class:`.Signer` but also records the time - of the signing and can be used to expire signatures. The - :meth:`unsign` method can raise :exc:`.SignatureExpired` if the - unsigning failed because the signature is expired. - """ - - def get_timestamp(self) -> int: - """Returns the current timestamp. The function must return an - integer. - """ - return int(time.time()) - - def timestamp_to_datetime(self, ts: int) -> datetime: - """Convert the timestamp from :meth:`get_timestamp` into an - aware :class`datetime.datetime` in UTC. - - .. 
versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - return datetime.fromtimestamp(ts, tz=timezone.utc) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string and also attaches time information.""" - value = want_bytes(value) - timestamp = base64_encode(int_to_bytes(self.get_timestamp())) - sep = want_bytes(self.sep) - value = value + sep + timestamp - return value + sep + self.get_signature(value) - - # Ignore overlapping signatures check, return_timestamp is the only - # parameter that affects the return type. - - @t.overload - def unsign( # type: ignore[overload-overlap] - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[False] = False, - ) -> bytes: ... - - @t.overload - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[True] = True, - ) -> tuple[bytes, datetime]: ... - - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - ) -> tuple[bytes, datetime] | bytes: - """Works like the regular :meth:`.Signer.unsign` but can also - validate the time. See the base docstring of the class for - the general behavior. If ``return_timestamp`` is ``True`` the - timestamp of the signature will be returned as an aware - :class:`datetime.datetime` object in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - try: - result = super().unsign(signed_value) - sig_error = None - except BadSignature as e: - sig_error = e - result = e.payload or b"" - - sep = want_bytes(self.sep) - - # If there is no timestamp in the result there is something - # seriously wrong. In case there was a signature error, we raise - # that one directly, otherwise we have a weird situation in - # which we shouldn't have come except someone uses a time-based - # serializer on non-timestamp data, so catch that. - if sep not in result: - if sig_error: - raise sig_error - - raise BadTimeSignature("timestamp missing", payload=result) - - value, ts_bytes = result.rsplit(sep, 1) - ts_int: int | None = None - ts_dt: datetime | None = None - - try: - ts_int = bytes_to_int(base64_decode(ts_bytes)) - except Exception: - pass - - # Signature is *not* okay. Raise a proper error now that we have - # split the value and the timestamp. - if sig_error is not None: - if ts_int is not None: - try: - ts_dt = self.timestamp_to_datetime(ts_int) - except (ValueError, OSError, OverflowError) as exc: - # Windows raises OSError - # 32-bit raises OverflowError - raise BadTimeSignature( - "Malformed timestamp", payload=value - ) from exc - - raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) - - # Signature was okay but the timestamp is actually not there or - # malformed. Should not happen, but we handle it anyway. 
- if ts_int is None: - raise BadTimeSignature("Malformed timestamp", payload=value) - - # Check timestamp is not older than max_age - if max_age is not None: - age = self.get_timestamp() - ts_int - - if age > max_age: - raise SignatureExpired( - f"Signature age {age} > {max_age} seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if age < 0: - raise SignatureExpired( - f"Signature age {age} < 0 seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if return_timestamp: - return value, self.timestamp_to_datetime(ts_int) - - return value - - def validate(self, signed_value: str | bytes, max_age: int | None = None) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid.""" - try: - self.unsign(signed_value, max_age=max_age) - return True - except BadSignature: - return False - - -class TimedSerializer(Serializer[_TSerialized]): - """Uses :class:`TimestampSigner` instead of the default - :class:`.Signer`. - """ - - default_signer: type[TimestampSigner] = TimestampSigner - - def iter_unsigners( - self, salt: str | bytes | None = None - ) -> cabc.Iterator[TimestampSigner]: - return t.cast("cabc.Iterator[TimestampSigner]", super().iter_unsigners(salt)) - - # TODO: Signature is incompatible because parameters were added - # before salt. - - def loads( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - salt: str | bytes | None = None, - ) -> t.Any: - """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the - signature validation fails. If a ``max_age`` is provided it will - ensure the signature is not older than that time in seconds. In - case the signature is outdated, :exc:`.SignatureExpired` is - raised. All arguments are forwarded to the signer's - :meth:`~TimestampSigner.unsign` method. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - base64d, timestamp = signer.unsign( - s, max_age=max_age, return_timestamp=True - ) - payload = self.load_payload(base64d) - - if return_timestamp: - return payload, timestamp - - return payload - except SignatureExpired: - # The signature was unsigned successfully but was - # expired. Do not try the next signer. - raise - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def loads_unsafe( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - salt: str | bytes | None = None, - ) -> tuple[bool, t.Any]: - return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/.venv/lib/python3.12/site-packages/itsdangerous/url_safe.py b/.venv/lib/python3.12/site-packages/itsdangerous/url_safe.py deleted file mode 100644 index 56a0793..0000000 --- a/.venv/lib/python3.12/site-packages/itsdangerous/url_safe.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations - -import typing as t -import zlib - -from ._json import _CompactJSON -from .encoding import base64_decode -from .encoding import base64_encode -from .exc import BadPayload -from .serializer import _PDataSerializer -from .serializer import Serializer -from .timed import TimedSerializer - - -class URLSafeSerializerMixin(Serializer[str]): - """Mixed in with a regular serializer it will attempt to zlib - compress the string to make it shorter if necessary. It will also - base64 encode the string so that it can safely be placed in a URL. 
- """ - - default_serializer: _PDataSerializer[str] = _CompactJSON - - def load_payload( - self, - payload: bytes, - *args: t.Any, - serializer: t.Any | None = None, - **kwargs: t.Any, - ) -> t.Any: - decompress = False - - if payload.startswith(b"."): - payload = payload[1:] - decompress = True - - try: - json = base64_decode(payload) - except Exception as e: - raise BadPayload( - "Could not base64 decode the payload because of an exception", - original_error=e, - ) from e - - if decompress: - try: - json = zlib.decompress(json) - except Exception as e: - raise BadPayload( - "Could not zlib decompress the payload before decoding the payload", - original_error=e, - ) from e - - return super().load_payload(json, *args, **kwargs) - - def dump_payload(self, obj: t.Any) -> bytes: - json = super().dump_payload(obj) - is_compressed = False - compressed = zlib.compress(json) - - if len(compressed) < (len(json) - 1): - json = compressed - is_compressed = True - - base64d = base64_encode(json) - - if is_compressed: - base64d = b"." + base64d - - return base64d - - -class URLSafeSerializer(URLSafeSerializerMixin, Serializer[str]): - """Works like :class:`.Serializer` but dumps and loads into a URL - safe string consisting of the upper and lowercase character of the - alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ - - -class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer[str]): - """Works like :class:`.TimedSerializer` but dumps and loads into a - URL safe string consisting of the upper and lowercase character of - the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ diff --git a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/METADATA b/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/METADATA deleted file mode 100644 index ffef2ff..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/METADATA +++ /dev/null @@ -1,84 +0,0 @@ -Metadata-Version: 2.4 -Name: Jinja2 -Version: 3.1.6 -Summary: A very fast and expressive template engine. -Maintainer-email: Pallets -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: MarkupSafe>=2.0 -Requires-Dist: Babel>=2.7 ; extra == "i18n" -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/jinja/ -Provides-Extra: i18n - -# Jinja - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. 
-- Define and import macros within templates. -- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -## In A Nutshell - -```jinja -{% extends "base.html" %} -{% block title %}Members{% endblock %} -{% block content %} - -{% endblock %} -``` - -## Donate - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/RECORD b/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/RECORD deleted file mode 100644 index ffa3866..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/RECORD +++ /dev/null @@ -1,57 +0,0 @@ -jinja2-3.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jinja2-3.1.6.dist-info/METADATA,sha256=aMVUj7Z8QTKhOJjZsx7FDGvqKr3ZFdkh8hQ1XDpkmcg,2871 -jinja2-3.1.6.dist-info/RECORD,, -jinja2-3.1.6.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82 -jinja2-3.1.6.dist-info/entry_points.txt,sha256=OL85gYU1eD8cuPlikifFngXpeBjaxl6rIJ8KkC_3r-I,58 -jinja2-3.1.6.dist-info/licenses/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -jinja2/__init__.py,sha256=xxepO9i7DHsqkQrgBEduLtfoz2QCuT6_gbL4XSN1hbU,1928 -jinja2/__pycache__/__init__.cpython-312.pyc,, -jinja2/__pycache__/_identifier.cpython-312.pyc,, -jinja2/__pycache__/async_utils.cpython-312.pyc,, -jinja2/__pycache__/bccache.cpython-312.pyc,, -jinja2/__pycache__/compiler.cpython-312.pyc,, -jinja2/__pycache__/constants.cpython-312.pyc,, -jinja2/__pycache__/debug.cpython-312.pyc,, -jinja2/__pycache__/defaults.cpython-312.pyc,, -jinja2/__pycache__/environment.cpython-312.pyc,, -jinja2/__pycache__/exceptions.cpython-312.pyc,, -jinja2/__pycache__/ext.cpython-312.pyc,, -jinja2/__pycache__/filters.cpython-312.pyc,, -jinja2/__pycache__/idtracking.cpython-312.pyc,, -jinja2/__pycache__/lexer.cpython-312.pyc,, -jinja2/__pycache__/loaders.cpython-312.pyc,, -jinja2/__pycache__/meta.cpython-312.pyc,, -jinja2/__pycache__/nativetypes.cpython-312.pyc,, -jinja2/__pycache__/nodes.cpython-312.pyc,, -jinja2/__pycache__/optimizer.cpython-312.pyc,, -jinja2/__pycache__/parser.cpython-312.pyc,, -jinja2/__pycache__/runtime.cpython-312.pyc,, -jinja2/__pycache__/sandbox.cpython-312.pyc,, -jinja2/__pycache__/tests.cpython-312.pyc,, -jinja2/__pycache__/utils.cpython-312.pyc,, -jinja2/__pycache__/visitor.cpython-312.pyc,, -jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 
-jinja2/async_utils.py,sha256=vK-PdsuorOMnWSnEkT3iUJRIkTnYgO2T6MnGxDgHI5o,2834 -jinja2/bccache.py,sha256=gh0qs9rulnXo0PhX5jTJy2UHzI8wFnQ63o_vw7nhzRg,14061 -jinja2/compiler.py,sha256=9RpCQl5X88BHllJiPsHPh295Hh0uApvwFJNQuutULeM,74131 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=CnHqCDHd-BVGvti_8ZsTolnXNhA3ECsY-6n_2pwU8Hw,6297 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 -jinja2/environment.py,sha256=9nhrP7Ch-NbGX00wvyr4yy-uhNHq2OCc60ggGrni_fk,61513 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=5PF5eHfh8mXAIxXHHRB2xXbXohi8pE3nHSOxa66uS7E,31875 -jinja2/filters.py,sha256=PQ_Egd9n9jSgtnGQYyF4K5j2nYwhUIulhPnyimkdr-k,55212 -jinja2/idtracking.py,sha256=-ll5lIp73pML3ErUYiIJj7tdmWxcH_IlDv3yA_hiZYo,10555 -jinja2/lexer.py,sha256=LYiYio6br-Tep9nPcupWXsPEtjluw3p1mU-lNBVRUfk,29786 -jinja2/loaders.py,sha256=wIrnxjvcbqh5VwW28NSkfotiDq8qNCxIOSFbGUiSLB4,24055 -jinja2/meta.py,sha256=OTDPkaFvU2Hgvx-6akz7154F8BIWaRmvJcBFvwopHww,4397 -jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 -jinja2/nodes.py,sha256=m1Duzcr6qhZI8JQ6VyJgUNinjAf5bQzijSmDnMsvUx8,34579 -jinja2/optimizer.py,sha256=rJnCRlQ7pZsEEmMhsQDgC_pKyDHxP5TPS6zVPGsgcu8,1651 -jinja2/parser.py,sha256=lLOFy3sEmHc5IaEHRiH1sQVnId2moUQzhyeJZTtdY30,40383 -jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=gDk-GvdriJXqgsGbHgrcKTP0Yp6zPXzhzrIpCFH3jAU,34249 -jinja2/sandbox.py,sha256=Mw2aitlY2I8la7FYhcX2YG9BtUYcLnD0Gh3d29cDWrY,15009 -jinja2/tests.py,sha256=VLsBhVFnWg-PxSBz1MhRnNWgP1ovXk3neO1FLQMeC9Q,5926 -jinja2/utils.py,sha256=rRp3o9e7ZKS4fyrWRbELyLcpuGVTFcnooaOa1qx_FIk,24129 -jinja2/visitor.py,sha256=EcnL1PIwf_4RVCOMxsRNuR8AXHbS1qfAdMOE2ngKJz4,3557 diff --git a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/WHEEL deleted file mode 100644 index 23d2d7e..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.11.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/entry_points.txt b/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/entry_points.txt deleted file mode 100644 index abc3eae..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[babel.extractors] -jinja2=jinja2.ext:babel_extract[i18n] - diff --git a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt deleted file mode 100644 index c37cae4..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.12/site-packages/jinja2/__init__.py b/.venv/lib/python3.12/site-packages/jinja2/__init__.py deleted file mode 100644 index 1a423a3..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. -""" - -from .bccache import BytecodeCache as BytecodeCache -from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache -from .environment import Environment as Environment -from .environment import Template as Template -from .exceptions import TemplateAssertionError as TemplateAssertionError -from .exceptions import TemplateError as TemplateError -from .exceptions import TemplateNotFound as TemplateNotFound -from .exceptions import TemplateRuntimeError as TemplateRuntimeError -from .exceptions import TemplatesNotFound as TemplatesNotFound -from .exceptions import TemplateSyntaxError as TemplateSyntaxError -from .exceptions import UndefinedError as UndefinedError -from .loaders import BaseLoader as BaseLoader -from .loaders import ChoiceLoader as ChoiceLoader -from .loaders import DictLoader as DictLoader -from .loaders import FileSystemLoader as FileSystemLoader -from .loaders import FunctionLoader as FunctionLoader -from .loaders import ModuleLoader as ModuleLoader -from .loaders import PackageLoader as PackageLoader -from .loaders import PrefixLoader as PrefixLoader -from .runtime import ChainableUndefined as ChainableUndefined -from .runtime import DebugUndefined as DebugUndefined -from .runtime import make_logging_undefined as make_logging_undefined -from .runtime import StrictUndefined as StrictUndefined -from .runtime import Undefined as Undefined -from .utils import clear_caches as clear_caches -from .utils import is_undefined as is_undefined -from .utils import pass_context as pass_context -from .utils import pass_environment as pass_environment -from .utils import pass_eval_context as pass_eval_context -from .utils import select_autoescape as select_autoescape - -__version__ = "3.1.6" diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a35cfa3..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git 
a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc deleted file mode 100644 index 5f189e5..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc deleted file mode 100644 index f5ebed8..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc deleted file mode 100644 index 66e213e..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc deleted file mode 100644 index 15ba675..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc deleted file mode 100644 index b440885..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc deleted file mode 100644 index e716599..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc deleted file mode 100644 index 7ee5274..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc deleted file mode 100644 index 65362cc..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 7e998c3..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc deleted file mode 100644 index b0a5b5d..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc deleted file mode 100644 
index 02153f1..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc deleted file mode 100644 index 1062eeb..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc deleted file mode 100644 index 2601667..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc deleted file mode 100644 index c1403b1..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc deleted file mode 100644 index ac9066c..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc deleted file mode 100644 index 587bb5d..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc deleted file mode 100644 index b479234..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc deleted file mode 100644 index ee72a0e..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc deleted file mode 100644 index 3283909..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc deleted file mode 100644 index 92e90f0..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc deleted file mode 100644 index 60cee82..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc and /dev/null differ diff --git 
a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc deleted file mode 100644 index 11f6475..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 43f5348..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc b/.venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc deleted file mode 100644 index 4148e91..0000000 Binary files a/.venv/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc and /dev/null differ diff --git a/.venv/lib/python3.12/site-packages/jinja2/_identifier.py b/.venv/lib/python3.12/site-packages/jinja2/_identifier.py deleted file mode 100644 index 928c150..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/_identifier.py +++ /dev/null @@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/.venv/lib/python3.12/site-packages/jinja2/async_utils.py b/.venv/lib/python3.12/site-packages/jinja2/async_utils.py deleted file mode 100644 index f0c1402..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/async_utils.py +++ /dev/null @@ -1,99 +0,0 @@ -import inspect -import typing as t -from functools import WRAPPER_ASSIGNMENTS -from functools import wraps - -from .utils import _PassArg -from .utils import pass_eval_context - -if t.TYPE_CHECKING: - import typing_extensions as te - -V = t.TypeVar("V") - - -def async_variant(normal_func): # type: ignore - def decorator(async_func): # type: ignore - pass_arg = _PassArg.from_obj(normal_func) - need_eval_context = pass_arg is None - - if pass_arg is _PassArg.environment: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].is_async) - - else: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].environment.is_async) - - # Take the doc and annotations from the sync function, but the - # name from the async function. Pallets-Sphinx-Themes - # build_function_directive expects __wrapped__ to point to the - # sync function. 
- async_func_attrs = ("__module__", "__name__", "__qualname__") - normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) - - @wraps(normal_func, assigned=normal_func_attrs) - @wraps(async_func, assigned=async_func_attrs, updated=()) - def wrapper(*args, **kwargs): # type: ignore - b = is_async(args) - - if need_eval_context: - args = args[1:] - - if b: - return async_func(*args, **kwargs) - - return normal_func(*args, **kwargs) - - if need_eval_context: - wrapper = pass_eval_context(wrapper) - - wrapper.jinja_async_variant = True # type: ignore[attr-defined] - return wrapper - - return decorator - - -_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} - - -async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": - # Avoid a costly call to isawaitable - if type(value) in _common_primitives: - return t.cast("V", value) - - if inspect.isawaitable(value): - return await t.cast("t.Awaitable[V]", value) - - return value - - -class _IteratorToAsyncIterator(t.Generic[V]): - def __init__(self, iterator: "t.Iterator[V]"): - self._iterator = iterator - - def __aiter__(self) -> "te.Self": - return self - - async def __anext__(self) -> V: - try: - return next(self._iterator) - except StopIteration as e: - raise StopAsyncIteration(e.value) from e - - -def auto_aiter( - iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> "t.AsyncIterator[V]": - if hasattr(iterable, "__aiter__"): - return iterable.__aiter__() - else: - return _IteratorToAsyncIterator(iter(iterable)) - - -async def auto_to_list( - value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> t.List["V"]: - return [x async for x in auto_aiter(value)] diff --git a/.venv/lib/python3.12/site-packages/jinja2/bccache.py b/.venv/lib/python3.12/site-packages/jinja2/bccache.py deleted file mode 100644 index ada8b09..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/bccache.py +++ /dev/null @@ -1,408 +0,0 @@ -"""The optional bytecode cache system. This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" - -import errno -import fnmatch -import marshal -import os -import pickle -import stat -import sys -import tempfile -import typing as t -from hashlib import sha1 -from io import BytesIO -from types import CodeType - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - - class _MemcachedClient(te.Protocol): - def get(self, key: str) -> bytes: ... - - def set( - self, key: str, value: bytes, timeout: t.Optional[int] = None - ) -> None: ... - - -bc_version = 5 -# Magic bytes to identify Jinja bytecode cache files. Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket: - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. 
- """ - - def __init__(self, environment: "Environment", key: str, checksum: str) -> None: - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self) -> None: - """Resets the bucket (unloads the bytecode).""" - self.code: t.Optional[CodeType] = None - - def load_bytecode(self, f: t.BinaryIO) -> None: - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal.load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f: t.IO[bytes]) -> None: - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal.dump(self.code, f) - - def bytecode_from_string(self, string: bytes) -> None: - """Load bytecode from bytes.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self) -> bytes: - """Return the bytecode as bytes.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache: - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self) -> None: - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. 
- """ - - def get_cache_key( - self, name: str, filename: t.Optional[t.Union[str]] = None - ) -> str: - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - - if filename is not None: - hash.update(f"|{filename}".encode()) - - return hash.hexdigest() - - def get_source_checksum(self, source: str) -> str: - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket( - self, - environment: "Environment", - name: str, - filename: t.Optional[str], - source: str, - ) -> Bucket: - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket: Bucket) -> None: - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__( - self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" - ) -> None: - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self) -> str: - def _unsafe_dir() -> "te.NoReturn": - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." - ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = f"_jinja2-cache-{os.getuid()}" - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket: Bucket) -> str: - return os.path.join(self.directory, self.pattern % (bucket.key,)) - - def load_bytecode(self, bucket: Bucket) -> None: - filename = self._get_cache_filename(bucket) - - # Don't test for existence before opening the file, since the - # file could disappear after the test before the open. 
- try: - f = open(filename, "rb") - except (FileNotFoundError, IsADirectoryError, PermissionError): - # PermissionError can occur on Windows when an operation is - # in progress, such as calling clear(). - return - - with f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket: Bucket) -> None: - # Write to a temporary file, then rename to the real name after - # writing. This avoids another process reading the file before - # it is fully written. - name = self._get_cache_filename(bucket) - f = tempfile.NamedTemporaryFile( - mode="wb", - dir=os.path.dirname(name), - prefix=os.path.basename(name), - suffix=".tmp", - delete=False, - ) - - def remove_silent() -> None: - try: - os.remove(f.name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - pass - - try: - with f: - bucket.write_bytecode(f) - except BaseException: - remove_silent() - raise - - try: - os.replace(f.name, name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - remove_silent() - except BaseException: - remove_silent() - raise - - def clear(self) -> None: - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - - files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) - for filename in files: - try: - remove(os.path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only text. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. 
- """ - - def __init__( - self, - client: "_MemcachedClient", - prefix: str = "jinja2/bytecode/", - timeout: t.Optional[int] = None, - ignore_memcache_errors: bool = True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket: Bucket) -> None: - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - else: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket: Bucket) -> None: - key = self.prefix + bucket.key - value = bucket.bytecode_to_string() - - try: - if self.timeout is not None: - self.client.set(key, value, self.timeout) - else: - self.client.set(key, value) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/.venv/lib/python3.12/site-packages/jinja2/compiler.py b/.venv/lib/python3.12/site-packages/jinja2/compiler.py deleted file mode 100644 index a4ff6a1..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/compiler.py +++ /dev/null @@ -1,1998 +0,0 @@ -"""Compiles nodes from the parser into Python code.""" - -import typing as t -from contextlib import contextmanager -from functools import update_wrapper -from io import StringIO -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . import nodes -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import _PassArg -from .utils import concat -from .visitor import NodeVisitor - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - - -def optimizeconst(f: F) -> F: - def new_func( - self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any - ) -> t.Any: - # Only optimize if the frame is not volatile - if self.optimizer is not None and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - - if new_node != node: - return self.visit(new_node, frame) - - return f(self, node, frame, **kwargs) - - return update_wrapper(new_func, f) # type: ignore[return-value] - - -def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_binops # type: ignore - ): - self.write(f"environment.call_binop(context, {op!r}, ") - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(f" {op} ") - self.visit(node.right, frame) - - self.write(")") - - return visitor - - -def _make_unop( - op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_unops # type: ignore - ): - 
self.write(f"environment.call_unop(context, {op!r}, ") - self.visit(node.node, frame) - else: - self.write("(" + op) - self.visit(node.node, frame) - - self.write(")") - - return visitor - - -def generate( - node: nodes.Template, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, -) -> t.Optional[str]: - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - - if stream is None: - return generator.stream.getvalue() # type: ignore - - return None - - -def has_safe_repr(value: t.Any) -> bool: - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - - if type(value) in {bool, int, float, complex, range, str, Markup}: - return True - - if type(value) in {tuple, list, set, frozenset}: - return all(has_safe_repr(v) for v in value) - - if type(value) is dict: # noqa E721 - return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - - return False - - -def find_undeclared( - nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef: - def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame: - """Holds compile time information for us.""" - - def __init__( - self, - eval_ctx: EvalContext, - parent: t.Optional["Frame"] = None, - level: t.Optional[int] = None, - ) -> None: - self.eval_ctx = eval_ctx - - # the parent of this frame - self.parent = parent - - if parent is None: - self.symbols = Symbols(level=level) - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = False - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer: t.Optional[str] = None - - # the name of the block we're in, otherwise None. - self.block: t.Optional[str] = None - - else: - self.symbols = Symbols(parent.symbols, level=level) - self.require_output_check = parent.require_output_check - self.buffer = parent.buffer - self.block = parent.block - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # variables set inside of loops and blocks should not affect outer frames, - # but they still needs to be kept track of as part of the active context. 
- self.loop_frame = False - self.block_frame = False - - # track whether the frame is being used in an if-statement or conditional - # expression as it determines which errors should be raised during runtime - # or compile time. - self.soft_frame = False - - def copy(self) -> "te.Self": - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated: bool = False) -> "Frame": - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self) -> "te.Self": - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements and conditional - expressions. - """ - rv = self.copy() - rv.rootlevel = False - rv.soft_frame = True - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self) -> None: - self.filters: t.Set[str] = set() - self.tests: t.Set[str] = set() - - def visit_Filter(self, node: nodes.Filter) -> None: - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node: nodes.Test) -> None: - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names: t.Iterable[str]) -> None: - self.names = set(names) - self.undeclared: t.Set[str] = set() - - def visit_Name(self, node: nodes.Name) -> None: - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, - ) -> None: - if stream is None: - stream = StringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimizer: t.Optional[Optimizer] = None - - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases: t.Dict[str, str] = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks: t.Dict[str, nodes.Block] = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. 
In this case we - # can safely assume that we're a child template and do some - # more optimizations. - self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests: t.Dict[str, str] = {} - self.filters: t.Dict[str, str] = {} - - # the debug information - self.debug_info: t.List[t.Tuple[int, int]] = [] - self._write_debug_info: t.Optional[int] = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. - self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack: t.List[t.Set[str]] = [] - - # Tracks parameter definition blocks - self._param_def_block: t.List[t.Set[str]] = [] - - # Tracks the current context. - self._context_reference_stack = ["context"] - - @property - def optimized(self) -> bool: - return self.optimizer is not None - - # -- Various compilation helpers - - def fail(self, msg: str, lineno: int) -> "te.NoReturn": - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self) -> str: - """Get a new unique identifier.""" - self._last_identifier += 1 - return f"t_{self._last_identifier}" - - def buffer(self, frame: Frame) -> None: - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline(f"{frame.buffer} = []") - - def return_buffer_contents( - self, frame: Frame, force_unescaped: bool = False - ) -> None: - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline(f"return Markup(concat({frame.buffer}))") - self.outdent() - self.writeline("else:") - self.indent() - self.writeline(f"return concat({frame.buffer})") - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline(f"return Markup(concat({frame.buffer}))") - return - self.writeline(f"return concat({frame.buffer})") - - def indent(self) -> None: - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step: int = 1) -> None: - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline(f"{frame.buffer}.append(", node) - - def end_write(self, frame: Frame) -> None: - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write( - self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None - ) -> None: - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. 
- """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x: str) -> None: - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline( - self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 - ) -> None: - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature( - self, - node: t.Union[nodes.Call, nodes.Filter, nodes.Test], - frame: Frame, - extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> None: - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = any( - is_python_keyword(t.cast(str, k)) - for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) - ) - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f", {key}={value}") - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write(f"{kwarg.key!r}: ") - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f"{key!r}: {value}, ") - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: - """Find all filter and test names used in the template and - assign them to variables in the compiled namespace. Checking - that the names are registered with the environment is done when - compiling the Filter and Test nodes. If the node is in an If or - CondExpr node, the check is done at runtime instead. - - .. versionchanged:: 3.0 - Filters and tests in If and CondExpr nodes are checked at - runtime instead of compile time. 
- """ - visitor = DependencyFinderVisitor() - - for node in nodes: - visitor.visit(node) - - for id_map, names, dependency in ( - (self.filters, visitor.filters, "filters"), - ( - self.tests, - visitor.tests, - "tests", - ), - ): - for name in sorted(names): - if name not in id_map: - id_map[name] = self.temporary_identifier() - - # add check during runtime that dependencies used inside of executed - # blocks are defined, as this step may be skipped during compile time - self.writeline("try:") - self.indent() - self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") - self.outdent() - self.writeline("except KeyError:") - self.indent() - self.writeline("@internalcode") - self.writeline(f"def {id_map[name]}(*unused):") - self.indent() - self.writeline( - f'raise TemplateRuntimeError("No {dependency[:-1]}' - f' named {name!r} found.")' - ) - self.outdent() - self.outdent() - - def enter_frame(self, frame: Frame) -> None: - undefs = [] - for target, (action, param) in frame.symbols.loads.items(): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") - elif action == VAR_LOAD_ALIAS: - self.writeline(f"{target} = {param}") - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load instruction") - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: - if not with_python_scope: - undefs = [] - for target in frame.symbols.loads: - undefs.append(target) - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: - return async_value if self.environment.is_async else sync_value - - def func(self, name: str) -> str: - return f"{self.choose_async()}def {name}" - - def macro_body( - self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame - ) -> t.Tuple[Frame, MacroRef]: - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline(f"if {ref} is missing:") - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - f'{ref} = undefined("parameter {arg.name!r} was not provided",' - f" name={arg.name!r})" - ) - else: - self.writeline(f"{ref} = ") - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - f"Macro(environment, macro, {name!r}, ({arg_tuple})," - f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," - f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" - ) - - def position(self, node: nodes.Node) -> str: - """Return a human readable position for the node.""" - rv = f"line {node.lineno}" - if self.name is not None: - rv = f"{rv} in {self.name!r}" - return rv - - def dump_local_context(self, frame: Frame) -> str: - items_kv = ", ".join( - f"{name!r}: {target}" - for name, target in frame.symbols.dump_stores().items() - ) - return f"{{{items_kv}}}" - - def write_commons(self) -> None: - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - self.writeline("concat = environment.concat") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame: Frame) -> None: - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self) -> None: - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target: str) -> None: - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target: str) -> None: - self._context_reference_stack.append(target) - - def pop_context_reference(self) -> None: - self._context_reference_stack.pop() - - def get_context_ref(self) -> str: - return self._context_reference_stack[-1] - - def get_resolve_func(self) -> str: - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return f"{target}.resolve" - - def derive_context(self, frame: Frame) -> str: - return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - - def parameter_is_undeclared(self, target: str) -> bool: - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self) -> None: - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame: Frame) -> None: - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if ( - not frame.block_frame - and not frame.loop_frame - and not frame.toplevel - or not vars - ): - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - if frame.loop_frame: - self.writeline(f"_loop_vars[{name!r}] = {ref}") - return - if frame.block_frame: - self.writeline(f"_block_vars[{name!r}] = {ref}") - return - self.writeline(f"context.vars[{name!r}] = {ref}") - else: - if frame.loop_frame: - self.writeline("_loop_vars.update({") - elif frame.block_frame: - self.writeline("_block_vars.update({") - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(sorted(vars)): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write(f"{name!r}: {ref}") - self.write("})") - if not frame.block_frame and not frame.loop_frame and public_names: - if len(public_names) == 1: - self.writeline(f"context.exported_vars.add({public_names[0]!r})") - else: - names_str = ", ".join(map(repr, sorted(public_names))) - self.writeline(f"context.exported_vars.update(({names_str}))") - - # -- Statement Visitors - - def visit_Template( - self, node: nodes.Template, frame: t.Optional[Frame] = None - ) -> None: - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import async_exported - from .runtime import exported - - if self.environment.is_async: - exported_names = sorted(exported + async_exported) - else: - exported_names = sorted(exported) - - self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = "" if self.defer_init else ", environment=environment" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. 
- have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail(f"block {block.name!r} defined twice", block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." in imp: - module, obj = imp.rsplit(".", 1) - self.writeline(f"from {module} import {obj} as {alias}") - else: - self.writeline(f"import {imp} as {alias}") - - # add the load name - self.writeline(f"name = {self.name!r}") - - # generate the root render function. - self.writeline( - f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline("agen = parent_template.root_render_func(context)") - self.writeline("try:") - self.indent() - self.writeline("async for event in agen:") - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent() - self.writeline("finally: await agen.aclose()") - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in self.blocks.items(): - self.writeline( - f"{self.func('block_' + name)}(context, missing=missing{envenv}):", - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - block_frame.block_frame = True - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline(f"{ref} = context.super({name!r}, block_{name})") - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.writeline("_block_vars = {}") - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) - self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) - debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) - self.writeline(f"debug_info = {debug_kv_str!r}") - - def visit_Block(self, node: nodes.Block, frame: Frame) -> None: - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if node.required: - self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) - self.indent() - self.writeline( - f'raise TemplateRuntimeError("Required block {node.name!r} not found")', - node, - ) - self.outdent() - - if not self.environment.is_async and frame.buffer is None: - self.writeline( - f"yield from context.blocks[{node.name!r}][0]({context})", node - ) - else: - self.writeline(f"gen = context.blocks[{node.name!r}][0]({context})") - self.writeline("try:") - self.indent() - self.writeline( - f"{self.choose_async()}for event in gen:", - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - self.outdent() - self.writeline( - f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" - ) - - self.outdent(level) - - def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: - """Calls the extender.""" - if not frame.toplevel: - self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline('raise TemplateRuntimeError("extended multiple times")') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. 
- if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - self.writeline("for name, parent_block in parent_template.blocks.items():") - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node: nodes.Include, frame: Frame) -> None: - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, str): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = "select_template" - - self.writeline(f"template = environment.{func_name}(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - def loop_body() -> None: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.with_context: - self.writeline( - f"gen = template.root_render_func(" - "template.new_context(context.get_all(), True," - f" {self.dump_local_context(frame)}))" - ) - self.writeline("try:") - self.indent() - self.writeline(f"{self.choose_async()}for event in gen:") - loop_body() - self.outdent() - self.writeline( - f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" - ) - elif self.environment.is_async: - self.writeline( - "for event in (await template._get_default_module_async())" - "._body_stream:" - ) - loop_body() - else: - self.writeline("yield from template._get_default_module()._body_stream") - - if node.ignore_missing: - self.outdent() - - def _import_common( - self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame - ) -> None: - self.write(f"{self.choose_async('await ')}environment.get_template(") - self.visit(node.template, frame) - self.write(f", {self.name!r}).") - - if node.with_context: - f_name = f"make_module{self.choose_async('_async')}" - self.write( - f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" - ) - else: - self.write(f"_get_default_module{self.choose_async('_async')}(context)") - - def visit_Import(self, node: nodes.Import, frame: Frame) -> None: - """Visit regular imports.""" - self.writeline(f"{frame.symbols.ref(node.target)} = ", node) - if frame.toplevel: - self.write(f"context.vars[{node.target!r}] = ") - - self._import_common(node, frame) - - if frame.toplevel and not node.target.startswith("_"): - self.writeline(f"context.exported_vars.discard({node.target!r})") - - def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: - """Visit named imports.""" - self.newline(node) - self.write("included_template = ") - self._import_common(node, frame) - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - 
self.writeline( - f"{frame.symbols.ref(alias)} =" - f" getattr(included_template, {name!r}, missing)" - ) - self.writeline(f"if {frame.symbols.ref(alias)} is missing:") - self.indent() - # The position will contain the template name, and will be formatted - # into a string that will be compiled into an f-string. Curly braces - # in the name must be replaced with escapes so that they will not be - # executed as part of the f-string. - position = self.position(node).replace("{", "{{").replace("}", "}}") - message = ( - "the template {included_template.__name__!r}" - f" (imported on {position})" - f" does not export the requested name {name!r}" - ) - self.writeline( - f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") - else: - names_kv = ", ".join( - f"{name!r}: {frame.symbols.ref(name)}" for name in var_names - ) - self.writeline(f"context.vars.update({{{names_kv}}})") - if discarded_names: - if len(discarded_names) == 1: - self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") - else: - names_str = ", ".join(map(repr, discarded_names)) - self.writeline( - f"context.exported_vars.difference_update(({names_str}))" - ) - - def visit_For(self, node: nodes.For, frame: Frame) -> None: - loop_frame = frame.inner() - loop_frame.loop_frame = True - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body if the body is a scoped block. - extended_loop = ( - node.recursive - or "loop" - in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) - or any(block.scoped for block in node.find_all(nodes.Block)) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.choose_async("async for ", "for ")) - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.choose_async("auto_aiter(fiter)", "fiter")) - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline(f"{loop_ref} = missing") - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline(f"{iteration_indicator} = 1") - - self.writeline(self.choose_async("async for ", "for "), node) - self.visit(node.target, loop_frame) - if extended_loop: - self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") - else: - self.write(" in ") - - if node.test: - self.write(f"{loop_filter_func}(") - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(", undefined):" if extended_loop else ":") - - self.indent() - self.enter_frame(loop_frame) - - self.writeline("_loop_vars = {}") - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline(f"{iteration_indicator} = 0") - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline(f"if {iteration_indicator}:") - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - self.write(f"{self.choose_async('await ')}loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - # at the end of the iteration, clear any assignments made in the - # loop from the top level - if self._assign_stack: - self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - - def visit_If(self, node: nodes.If, frame: Frame) -> None: - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write(f"context.exported_vars.add({node.name!r})") - self.writeline(f"context.vars[{node.name!r}] = ") - self.write(f"{frame.symbols.ref(node.name)} = ") - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node: nodes.With, frame: Frame) -> None: - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in zip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: - self.newline(node) - self.visit(node.node, frame) - - class _FinalizeInfo(t.NamedTuple): - const: t.Optional[t.Callable[..., str]] - src: t.Optional[str] - - @staticmethod - def _default_finalize(value: t.Any) -> t.Any: - """The default finalize function if the environment isn't - configured with one. Or, if the environment has one, this is - called on that function's output for constants. - """ - return str(value) - - _finalize: t.Optional[_FinalizeInfo] = None - - def _make_finalize(self) -> _FinalizeInfo: - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. - """ - if self._finalize is not None: - return self._finalize - - finalize: t.Optional[t.Callable[..., t.Any]] - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(env_finalize) # type: ignore - ) - finalize = None - - if pass_arg is None: - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(value)) - - else: - src = f"{src}{pass_arg}, " - - if pass_arg == "environment": - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> str: - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. 
- """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return str(const) - - return finalize.const(const) # type: ignore - - def _output_child_pre( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code before visiting a child of an - ``Output`` node. - """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else str)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("str(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node: nodes.Output, frame: Frame) -> None: - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline(f"{frame.buffer}.append(") - else: - self.writeline(f"{frame.buffer}.extend((") - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. - val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: - self.push_assign_tracking() - - # ``a.b`` is allowed for assignment, and is parsed as an NSRef. However, - # it is only valid if it references a Namespace object. Emit a check for - # that for each ref here, before assignment code is emitted. This can't - # be done in visit_NSRef as the ref could be in the middle of a tuple. 
- seen_refs: t.Set[str] = set() - - for nsref in node.find_all(nodes.NSRef): - if nsref.name in seen_refs: - # Only emit the check for each reference once, in case the same - # ref is used multiple times in a tuple, `ns.a, ns.b = c, d`. - continue - - seen_refs.add(nsref.name) - ref = frame.symbols.ref(nsref.name) - self.writeline(f"if not isinstance({ref}, Namespace):") - self.indent() - self.writeline( - "raise TemplateRuntimeError" - '("cannot assign attribute on non-namespace object")' - ) - self.outdent() - - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write(f"concat({block_frame.buffer})") - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node: nodes.Name, frame: Frame) -> None: - if node.ctx == "store" and ( - frame.toplevel or frame.loop_frame or frame.block_frame - ): - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" - ) - return - - self.write(ref) - - def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: - # NSRef is a dotted assignment target a.b=c, but uses a[b]=c internally. - # visit_Assign emits code to validate that each ref is to a Namespace - # object only. That can't be emitted here as the ref could be in the - # middle of a tuple assignment. 
- ref = frame.symbols.ref(node.name) - self.writeline(f"{ref}[{node.attr!r}]") - - def visit_Const(self, node: nodes.Const, frame: Frame) -> None: - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" - ) - - def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(",)" if idx == 0 else ")") - - def visit_List(self, node: nodes.List, frame: Frame) -> None: - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - visit_Add = _make_binop("+") - visit_Sub = _make_binop("-") - visit_Mul = _make_binop("*") - visit_Div = _make_binop("/") - visit_FloorDiv = _make_binop("//") - visit_Pow = _make_binop("**") - visit_Mod = _make_binop("%") - visit_And = _make_binop("and") - visit_Or = _make_binop("or") - visit_Pos = _make_unop("+") - visit_Neg = _make_unop("-") - visit_Not = _make_unop("not ") - - @optimizeconst - def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: - if frame.eval_ctx.volatile: - func_name = "(markup_join if context.eval_ctx.volatile else str_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "str_join" - self.write(f"{func_name}((") - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: - self.write(f" {operators[node.op]} ") - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(f", {node.attr!r})") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: - # slices bypass the environment getitem method. 
- if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @contextmanager - def _filter_test_common( - self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool - ) -> t.Iterator[None]: - if self.environment.is_async: - self.write("(await auto_await(") - - if is_filter: - self.write(f"{self.filters[node.name]}(") - func = self.environment.filters.get(node.name) - else: - self.write(f"{self.tests[node.name]}(") - func = self.environment.tests.get(node.name) - - # When inside an If or CondExpr frame, allow the filter to be - # undefined at compile time and only raise an error if it's - # actually called at runtime. See pull_dependencies. - if func is None and not frame.soft_frame: - type_name = "filter" if is_filter else "test" - self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(func) # type: ignore - ) - - if pass_arg is not None: - self.write(f"{pass_arg}, ") - - # Back to the visitor function to handle visiting the target of - # the filter or test. 
- yield - - self.signature(node, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: - with self._filter_test_common(node, frame, True): - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - f"(Markup(concat({frame.buffer}))" - f" if context.eval_ctx.autoescape else concat({frame.buffer}))" - ) - elif frame.eval_ctx.autoescape: - self.write(f"Markup(concat({frame.buffer}))") - else: - self.write(f"concat({frame.buffer})") - - @optimizeconst - def visit_Test(self, node: nodes.Test, frame: Frame) -> None: - with self._filter_test_common(node, frame, False): - self.visit(node.node, frame) - - @optimizeconst - def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: - frame = frame.soft() - - def write_expr2() -> None: - if node.expr2 is not None: - self.visit(node.expr2, frame) - return - - self.write( - f'cond_expr_undefined("the inline if-expression on' - f" {self.position(node)} evaluated to false and no else" - f' section was defined.")' - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call( - self, node: nodes.Call, frame: Frame, forward_caller: bool = False - ) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = {"caller": "caller"} if forward_caller else None - loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} - block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} - if extra_kwargs: - extra_kwargs.update(loop_kwargs, **block_kwargs) - elif loop_kwargs or block_kwargs: - extra_kwargs = dict(loop_kwargs, **block_kwargs) - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write("))") - - def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape( - self, node: nodes.MarkSafeIfAutoescape, frame: Frame - ) -> None: - self.write("(Markup if context.eval_ctx.autoescape else identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute( - self, node: nodes.EnvironmentAttribute, frame: Frame - ) -> None: - self.write("environment." 
+ node.name) - - def visit_ExtensionAttribute( - self, node: nodes.ExtensionAttribute, frame: Frame - ) -> None: - self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - - def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: - self.write(node.name) - - def visit_ContextReference( - self, node: nodes.ContextReference, frame: Frame - ) -> None: - self.write("context") - - def visit_DerivedContextReference( - self, node: nodes.DerivedContextReference, frame: Frame - ) -> None: - self.write(self.derive_context(frame)) - - def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: - self.writeline("continue", node) - - def visit_Break(self, node: nodes.Break, frame: Frame) -> None: - self.writeline("break", node) - - def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: - ctx = self.temporary_identifier() - self.writeline(f"{ctx} = {self.derive_context(frame)}") - self.writeline(f"{ctx}.vars = ") - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier( - self, node: nodes.EvalContextModifier, frame: Frame - ) -> None: - for keyword in node.options: - self.writeline(f"context.eval_ctx.{keyword.key} = ") - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier( - self, node: nodes.ScopedEvalContextModifier, frame: Frame - ) -> None: - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/.venv/lib/python3.12/site-packages/jinja2/constants.py b/.venv/lib/python3.12/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie 
-mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/.venv/lib/python3.12/site-packages/jinja2/debug.py b/.venv/lib/python3.12/site-packages/jinja2/debug.py deleted file mode 100644 index eeeeee7..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: - from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: The original exception with the rewritten traceback. - """ - _, exc_value, tb = sys.exc_info() - exc_value = t.cast(BaseException, exc_value) - tb = t.cast(TracebackType, tb) - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - # Remove the old traceback, otherwise the frames from the - # compiler still show up. - exc_value.with_traceback(None) - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. - if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb.tb_next = tb_next - tb_next = tb - - return exc_value.with_traceback(tb_next) - - -def fake_traceback( # type: ignore - exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. 
- - :param exc_value: The original exception to be re-raised to create - the new traceback. - :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code: CodeType = compile( - "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" - ) - - # Build a new code object that points to the template file and - # replaces the location with a block name. - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = f"block {function[6:]!r}" - - if sys.version_info >= (3, 8): - code = code.replace(co_name=location) - else: - code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - code.co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - location, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars, - ) - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx: t.Optional[Context] = real_locals.get("context") - - if ctx is not None: - data: t.Dict[str, t.Any] = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. - local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth_str, name = name.split("_", 2) - depth = int(depth_str) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. 
- for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data diff --git a/.venv/lib/python3.12/site-packages/jinja2/defaults.py b/.venv/lib/python3.12/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: - import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "urlize.extra_schemes": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/.venv/lib/python3.12/site-packages/jinja2/environment.py b/.venv/lib/python3.12/site-packages/jinja2/environment.py deleted file mode 100644 index 0fc6e5b..0000000 --- a/.venv/lib/python3.12/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1672 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" - -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS # type: ignore[attr-defined] -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS # type: ignore[attr-defined] -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .bccache import BytecodeCache - from .ext import Extension - from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: - """Return a new spontaneous environment. A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - env = cls(*args) - env.shared = True - return env - - -def create_cache( - size: int, -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Return the cache class for the given size.""" - if size == 0: - return None - - if size < 0: - return {} - - return LRUCache(size) # type: ignore - - -def copy_cache( - cache: t.Optional[t.MutableMapping[t.Any, t.Any]], -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Create an empty copy of the given cache.""" - if cache is None: - return None - - if type(cache) is dict: # noqa E721 - return {} - - return LRUCache(cache.capacity) # type: ignore - - -def load_extensions( - environment: "Environment", - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated extensions. - """ - result = {} - - for extension in extensions: - if isinstance(extension, str): - extension = t.cast(t.Type["Extension"], import_string(extension)) - - result[extension.identifier] = extension(environment) - - return result - - -def _environment_config_check(environment: _env_bound) -> _env_bound: - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
- assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different." - assert environment.newline_sequence in { - "\r", - "\r\n", - "\n", - }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." - return environment - - -class Environment: - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. 
Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which - allows using async functions and generators. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to: t.Optional["Environment"] = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - - concat = "".join - - #: the context class that is used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. - context_class: t.Type[Context] = Context - - template_class: t.Type["Template"] - - def __init__( - self, - block_start_string: str = BLOCK_START_STRING, - block_end_string: str = BLOCK_END_STRING, - variable_start_string: str = VARIABLE_START_STRING, - variable_end_string: str = VARIABLE_END_STRING, - comment_start_string: str = COMMENT_START_STRING, - comment_end_string: str = COMMENT_END_STRING, - line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, - line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, - trim_blocks: bool = TRIM_BLOCKS, - lstrip_blocks: bool = LSTRIP_BLOCKS, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, - keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), - optimized: bool = True, - undefined: t.Type[Undefined] = Undefined, - finalize: t.Optional[t.Callable[..., t.Any]] = None, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, - loader: t.Optional["BaseLoader"] = None, - cache_size: int = 400, - auto_reload: bool = True, - bytecode_cache: t.Optional["BytecodeCache"] = None, - enable_async: bool = False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. 
However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined: t.Type[Undefined] = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.is_async = enable_async - _environment_config_check(self) - - def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes: t.Any) -> None: - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in attributes.items(): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string: str = missing, - block_end_string: str = missing, - variable_start_string: str = missing, - variable_end_string: str = missing, - comment_start_string: str = missing, - comment_end_string: str = missing, - line_statement_prefix: t.Optional[str] = missing, - line_comment_prefix: t.Optional[str] = missing, - trim_blocks: bool = missing, - lstrip_blocks: bool = missing, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, - keep_trailing_newline: bool = missing, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, - optimized: bool = missing, - undefined: t.Type[Undefined] = missing, - finalize: t.Optional[t.Callable[..., t.Any]] = missing, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, - loader: t.Optional["BaseLoader"] = missing, - cache_size: int = missing, - auto_reload: bool = missing, - bytecode_cache: t.Optional["BytecodeCache"] = missing, - enable_async: bool = missing, - ) -> "te.Self": - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. 
An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. - - .. versionchanged:: 3.1.5 - ``enable_async`` is applied correctly. - - .. versionchanged:: 3.1.2 - Added the ``newline_sequence``, ``keep_trailing_newline``, - and ``enable_async`` parameters to match ``__init__``. - """ - args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"], args["enable_async"] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in args.items(): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in self.extensions.items(): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - if enable_async is not missing: - rv.is_async = enable_async - - return _environment_config_check(rv) - - @property - def lexer(self) -> Lexer: - """The lexer for this environment.""" - return get_lexer(self) - - def iter_extensions(self) -> t.Iterator["Extension"]: - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - - def getitem( - self, obj: t.Any, argument: t.Union[str, t.Any] - ) -> t.Union[t.Any, Undefined]: - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, str): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj: t.Any, attribute: str) -> t.Any: - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a string. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def _filter_test_common( - self, - name: t.Union[str, Undefined], - value: t.Any, - args: t.Optional[t.Sequence[t.Any]], - kwargs: t.Optional[t.Mapping[str, t.Any]], - context: t.Optional[Context], - eval_ctx: t.Optional[EvalContext], - is_filter: bool, - ) -> t.Any: - if is_filter: - env_map = self.filters - type_name = "filter" - else: - env_map = self.tests - type_name = "test" - - func = env_map.get(name) # type: ignore - - if func is None: - msg = f"No {type_name} named {name!r}." - - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = f"{msg} ({e}; did you forget to quote the callable name?)" - - raise TemplateRuntimeError(msg) - - args = [value, *(args if args is not None else ())] - kwargs = kwargs if kwargs is not None else {} - pass_arg = _PassArg.from_obj(func) - - if pass_arg is _PassArg.context: - if context is None: - raise TemplateRuntimeError( - f"Attempted to invoke a context {type_name} without context." 
- ) - - args.insert(0, context) - elif pass_arg is _PassArg.eval_context: - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - - args.insert(0, eval_ctx) - elif pass_arg is _PassArg.environment: - args.insert(0, self) - - return func(*args, **kwargs) - - def call_filter( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a filter on a value the same way the compiler does. - - This might return a coroutine if the filter is running from an - environment in async mode and the filter supports async - execution. It's your responsibility to await this if needed. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, True - ) - - def call_test( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a test on a value the same way the compiler does. - - This might return a coroutine if the test is running from an - environment in async mode and the test supports async execution. - It's your responsibility to await this if needed. - - .. versionchanged:: 3.0 - Tests support ``@pass_context``, etc. decorators. Added - the ``context`` and ``eval_ctx`` parameters. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, False - ) - - @internalcode - def parse( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> nodes.Template: - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def _parse( - self, source: str, name: t.Optional[str], filename: t.Optional[str] - ) -> nodes.Template: - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, filename).parse() - - def lex( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> t.Iterator[t.Tuple[int, str, str]]: - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = str(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def preprocess( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> str: - """Preprocesses the source with all extensions. 
This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce( - lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), - str(source), - ) - - def _tokenize( - self, - source: str, - name: t.Optional[str], - filename: t.Optional[str] = None, - state: t.Optional[str] = None, - ) -> TokenStream: - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) # type: ignore - - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - - return stream - - def _generate( - self, - source: nodes.Template, - name: t.Optional[str], - filename: t.Optional[str], - defer_init: bool = False, - ) -> str: - """Internal hook that can be overridden to hook a different generate - method in. - - .. versionadded:: 2.5 - """ - return generate( # type: ignore - source, - self, - name, - filename, - defer_init=defer_init, - optimized=self.optimized, - ) - - def _compile(self, source: str, filename: str) -> CodeType: - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, "exec") - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[False]" = False, - defer_init: bool = False, - ) -> CodeType: ... - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[True]" = ..., - defer_init: bool = False, - ) -> str: ... - - @internalcode - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: bool = False, - defer_init: bool = False, - ) -> t.Union[str, CodeType]: - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, str): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, defer_init=defer_init) - if raw: - return source - if filename is None: - filename = "